Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_Mults, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtranspose, MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_Applypapt, MAT_Applypapt_numeric, MAT_Applypapt_symbolic, MAT_GetSequentialNonzeroStructure;
36: PetscLogEvent MAT_GetMultiProcBlock;
37: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
38: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
39: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
40: PetscLogEvent MAT_SetValuesBatch;
41: PetscLogEvent MAT_ViennaCLCopyToGPU;
42: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
43: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
44: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
45: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
46: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
48: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
50: /*@
51: MatSetRandom - Sets all components of a matrix to random numbers.
53: Logically Collective
55: Input Parameters:
56: + x - the matrix
57: - rctx - the `PetscRandom` object created with `PetscRandomCreate()`, or `NULL`,
58: in which case one will be created internally.
60: Example:
61: .vb
62: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
63: MatSetRandom(x,rctx);
64: PetscRandomDestroy(&rctx);
65: .ve
67: Level: intermediate
69: Notes:
70: For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations;
72: for sparse matrices that already have nonzero locations, it fills those locations with random numbers.
74: It generates an error if used on sparse matrices that have not been preallocated.
76: .seealso: [](chapter_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
77: @*/
78: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
79: {
80: PetscRandom randObj = NULL;
82: PetscFunctionBegin;
86: MatCheckPreallocated(x, 1);
88: if (!rctx) {
89: MPI_Comm comm;
90: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
91: PetscCall(PetscRandomCreate(comm, &randObj));
92: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
93: PetscCall(PetscRandomSetFromOptions(randObj));
94: rctx = randObj;
95: }
96: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
97: PetscUseTypeMethod(x, setrandom, rctx);
98: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
100: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
101: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(PetscRandomDestroy(&randObj));
103: PetscFunctionReturn(PETSC_SUCCESS);
104: }
106: /*@
107: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
109: Logically Collective
111: Input Parameter:
112: . mat - the factored matrix
114: Output Parameters:
115: + pivot - the pivot value computed
116: - row - the row in which the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
117: share the matrix
119: Level: advanced
121: Notes:
122: This routine does not work for factorizations done with external packages.
124: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
126: This can also be called on non-factored matrices, for example, matrices used in SOR.
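Example Usage:
A possible sketch of checking for a zero pivot after a numeric factorization (error checking omitted; the factored matrix `F` is illustrative):
.vb
  MatFactorError err;

  MatFactorGetError(F, &err);
  if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
    PetscReal pivot;
    PetscInt  row;

    MatFactorGetErrorZeroPivot(F, &pivot, &row);
    PetscPrintf(PETSC_COMM_SELF, "zero pivot %g in row %" PetscInt_FMT "\n", (double)pivot, row);
  }
.ve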
128: .seealso: [](chapter_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`,
129: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
130: @*/
131: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
132: {
133: PetscFunctionBegin;
137: *pivot = mat->factorerror_zeropivot_value;
138: *row = mat->factorerror_zeropivot_row;
139: PetscFunctionReturn(PETSC_SUCCESS);
140: }
142: /*@
143: MatFactorGetError - gets the error code from a factorization
145: Logically Collective
147: Input Parameter:
148: . mat - the factored matrix
150: Output Parameter:
151: . err - the error code
153: Level: advanced
155: Note:
156: This can also be called on non-factored matrices, for example, matrices used in SOR.
158: .seealso: [](chapter_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
159: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
160: @*/
161: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
162: {
163: PetscFunctionBegin;
166: *err = mat->factorerrortype;
167: PetscFunctionReturn(PETSC_SUCCESS);
168: }
170: /*@
171: MatFactorClearError - clears the error code in a factorization
173: Logically Collective
175: Input Parameter:
176: . mat - the factored matrix
178: Level: developer
180: Note:
181: This can also be called on non-factored matrices, for example, matrices used in SOR.
183: .seealso: [](chapter_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
184: `MatGetErrorCode()`, `MatFactorError`
185: @*/
186: PetscErrorCode MatFactorClearError(Mat mat)
187: {
188: PetscFunctionBegin;
190: mat->factorerrortype = MAT_FACTOR_NOERROR;
191: mat->factorerror_zeropivot_value = 0.0;
192: mat->factorerror_zeropivot_row = 0;
193: PetscFunctionReturn(PETSC_SUCCESS);
194: }
196: PETSC_INTERN PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
197: {
198: Vec r, l;
199: const PetscScalar *al;
200: PetscInt i, nz, gnz, N, n;
202: PetscFunctionBegin;
203: PetscCall(MatCreateVecs(mat, &r, &l));
204: if (!cols) { /* nonzero rows */
205: PetscCall(MatGetSize(mat, &N, NULL));
206: PetscCall(MatGetLocalSize(mat, &n, NULL));
207: PetscCall(VecSet(l, 0.0));
208: PetscCall(VecSetRandom(r, NULL));
209: PetscCall(MatMult(mat, r, l));
210: PetscCall(VecGetArrayRead(l, &al));
211: } else { /* nonzero columns */
212: PetscCall(MatGetSize(mat, NULL, &N));
213: PetscCall(MatGetLocalSize(mat, NULL, &n));
214: PetscCall(VecSet(r, 0.0));
215: PetscCall(VecSetRandom(l, NULL));
216: PetscCall(MatMultTranspose(mat, l, r));
217: PetscCall(VecGetArrayRead(r, &al));
218: }
219: if (tol <= 0.0) {
220: for (i = 0, nz = 0; i < n; i++)
221: if (al[i] != 0.0) nz++;
222: } else {
223: for (i = 0, nz = 0; i < n; i++)
224: if (PetscAbsScalar(al[i]) > tol) nz++;
225: }
226: PetscCall(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
227: if (gnz != N) {
228: PetscInt *nzr;
229: PetscCall(PetscMalloc1(nz, &nzr));
230: if (nz) {
231: if (tol < 0) {
232: for (i = 0, nz = 0; i < n; i++)
233: if (al[i] != 0.0) nzr[nz++] = i;
234: } else {
235: for (i = 0, nz = 0; i < n; i++)
236: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i;
237: }
238: }
239: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
240: } else *nonzero = NULL;
241: if (!cols) { /* nonzero rows */
242: PetscCall(VecRestoreArrayRead(l, &al));
243: } else {
244: PetscCall(VecRestoreArrayRead(r, &al));
245: }
246: PetscCall(VecDestroy(&l));
247: PetscCall(VecDestroy(&r));
248: PetscFunctionReturn(PETSC_SUCCESS);
249: }
251: /*@
252: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
254: Input Parameter:
255: . mat - the matrix
257: Output Parameter:
258: . keptrows - the rows that are not completely zero
260: Level: intermediate
262: Note:
263: `keptrows` is set to `NULL` if all rows are nonzero.
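Example Usage:
A minimal sketch (error checking omitted; `A` denotes an assembled matrix):
.vb
  IS keptrows;

  MatFindNonzeroRows(A, &keptrows);
  if (keptrows) { /* NULL means every row has at least one nonzero */
    ISView(keptrows, PETSC_VIEWER_STDOUT_WORLD);
    ISDestroy(&keptrows);
  }
.ve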
265: .seealso: [](chapter_matrices), `Mat`, `MatFindZeroRows()`
266: @*/
267: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
268: {
269: PetscFunctionBegin;
273: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
274: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
275: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
276: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
277: PetscFunctionReturn(PETSC_SUCCESS);
278: }
280: /*@
281: MatFindZeroRows - Locate all rows that are completely zero in the matrix
283: Input Parameter:
284: . mat - the matrix
286: Output Parameter:
287: . zerorows - the rows that are completely zero
289: Level: intermediate
291: Note:
292: `zerorows` is set to `NULL` if no rows are zero.
294: .seealso: [](chapter_matrices), `Mat`, `MatFindNonzeroRows()`
295: @*/
296: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
297: {
298: IS keptrows;
299: PetscInt m, n;
301: PetscFunctionBegin;
305: PetscCall(MatFindNonzeroRows(mat, &keptrows));
306: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
307: In keeping with this convention, we set zerorows to NULL if there are no zero
308: rows. */
309: if (keptrows == NULL) {
310: *zerorows = NULL;
311: } else {
312: PetscCall(MatGetOwnershipRange(mat, &m, &n));
313: PetscCall(ISComplement(keptrows, m, n, zerorows));
314: PetscCall(ISDestroy(&keptrows));
315: }
316: PetscFunctionReturn(PETSC_SUCCESS);
317: }
319: /*@
320: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
322: Not Collective
324: Input Parameter:
325: . A - the matrix
327: Output Parameter:
328: . a - the diagonal part (which is a SEQUENTIAL matrix)
330: Level: advanced
332: Notes:
333: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
335: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
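Example Usage:
A minimal sketch (error checking omitted; `A` denotes an assembled parallel matrix); note the returned matrix is not destroyed by the caller:
.vb
  Mat Adiag;

  MatGetDiagonalBlock(A, &Adiag);
  MatView(Adiag, PETSC_VIEWER_STDOUT_SELF); /* Adiag is a sequential matrix */
.ve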
337: .seealso: [](chapter_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
338: @*/
339: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
340: {
341: PetscFunctionBegin;
345: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
346: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
347: else {
348: PetscMPIInt size;
350: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
351: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
352: *a = A;
353: }
354: PetscFunctionReturn(PETSC_SUCCESS);
355: }
357: /*@
358: MatGetTrace - Gets the trace of a matrix, that is, the sum of the diagonal entries.
360: Collective
362: Input Parameter:
363: . mat - the matrix
365: Output Parameter:
366: . trace - the sum of the diagonal entries
368: Level: advanced
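Example Usage:
A minimal sketch (error checking omitted; `A` denotes an assembled square matrix):
.vb
  PetscScalar trace;

  MatGetTrace(A, &trace);
  PetscPrintf(PETSC_COMM_WORLD, "trace = %g\n", (double)PetscRealPart(trace));
.ve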
370: .seealso: [](chapter_matrices), `Mat`
371: @*/
372: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
373: {
374: Vec diag;
376: PetscFunctionBegin;
379: PetscCall(MatCreateVecs(mat, &diag, NULL));
380: PetscCall(MatGetDiagonal(mat, diag));
381: PetscCall(VecSum(diag, trace));
382: PetscCall(VecDestroy(&diag));
383: PetscFunctionReturn(PETSC_SUCCESS);
384: }
386: /*@
387: MatRealPart - Zeros out the imaginary part of the matrix
389: Logically Collective
391: Input Parameter:
392: . mat - the matrix
394: Level: advanced
396: .seealso: [](chapter_matrices), `Mat`, `MatImaginaryPart()`
397: @*/
398: PetscErrorCode MatRealPart(Mat mat)
399: {
400: PetscFunctionBegin;
403: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
404: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
405: MatCheckPreallocated(mat, 1);
406: PetscUseTypeMethod(mat, realpart);
407: PetscFunctionReturn(PETSC_SUCCESS);
408: }
410: /*@C
411: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
413: Collective
415: Input Parameter:
416: . mat - the matrix
418: Output Parameters:
419: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each block)
420: - ghosts - the global indices of the ghost points
422: Level: advanced
424: Note:
425: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()`
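Example Usage:
A sketch of passing the ghost information to `VecCreateGhost()` (error checking omitted; `A` denotes an assembled parallel matrix):
.vb
  PetscInt        nghosts, n;
  const PetscInt *ghosts;
  Vec             v;

  MatGetGhosts(A, &nghosts, &ghosts);
  MatGetLocalSize(A, NULL, &n);
  VecCreateGhost(PetscObjectComm((PetscObject)A), n, PETSC_DETERMINE, nghosts, ghosts, &v);
.ve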
427: .seealso: [](chapter_matrices), `Mat`, `VecCreateGhost()`
428: @*/
429: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
430: {
431: PetscFunctionBegin;
434: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
435: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
436: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
437: else {
438: if (nghosts) *nghosts = 0;
439: if (ghosts) *ghosts = NULL;
440: }
441: PetscFunctionReturn(PETSC_SUCCESS);
442: }
444: /*@
445: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
447: Logically Collective
449: Input Parameter:
450: . mat - the matrix
452: Level: advanced
454: .seealso: [](chapter_matrices), `Mat`, `MatRealPart()`
455: @*/
456: PetscErrorCode MatImaginaryPart(Mat mat)
457: {
458: PetscFunctionBegin;
461: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
462: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
463: MatCheckPreallocated(mat, 1);
464: PetscUseTypeMethod(mat, imaginarypart);
465: PetscFunctionReturn(PETSC_SUCCESS);
466: }
468: /*@
469: MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices)
471: Not Collective
473: Input Parameter:
474: . mat - the matrix
476: Output Parameters:
477: + missing - is any diagonal missing
478: - dd - first diagonal entry on this process that is missing (optional)
480: Level: advanced
482: .seealso: [](chapter_matrices), `Mat`
483: @*/
484: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
485: {
486: PetscFunctionBegin;
490: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
491: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
492: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
493: PetscFunctionReturn(PETSC_SUCCESS);
494: }
496: /*@C
497: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
498: for each row that you get to ensure that your application does
499: not leak memory.
501: Not Collective
503: Input Parameters:
504: + mat - the matrix
505: - row - the row to get
507: Output Parameters:
508: + ncols - if not `NULL`, the number of nonzeros in the row
509: . cols - if not `NULL`, the column numbers
510: - vals - if not `NULL`, the values
512: Level: advanced
514: Notes:
515: This routine is provided for people who need to have direct access
516: to the structure of a matrix. We hope that we provide enough
517: high-level matrix routines that few users will need it.
519: `MatGetRow()` always returns 0-based column indices, regardless of
520: whether the internal representation is 0-based (default) or 1-based.
522: For better efficiency, set cols and/or vals to `NULL` if you do
523: not wish to extract these quantities.
525: The user can only examine the values extracted with `MatGetRow()`;
526: the values cannot be altered. To change the matrix entries, one
527: must use `MatSetValues()`.
529: You can only have one call to `MatGetRow()` outstanding for a particular
530: matrix at a time, per processor. `MatGetRow()` can only obtain rows
531: associated with the given processor; it cannot get rows from the
532: other processors. For that we suggest using `MatCreateSubMatrices()`, then
533: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
534: is in the global numbering of rows.
536: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
538: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
540: Fortran Note:
541: The calling sequence is
542: .vb
543: MatGetRow(matrix,row,ncols,cols,values,ierr)
544: Mat matrix (input)
545: integer row (input)
546: integer ncols (output)
547: integer cols(maxcols) (output)
548: double precision (or double complex) values(maxcols) output
549: .ve
550: where maxcols >= maximum nonzeros in any row of the matrix.
552: Caution:
553: Do not try to change the contents of the output arrays (`cols` and `vals`).
554: In some cases, this may corrupt the matrix.
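Example Usage:
A sketch of looping over the locally owned rows of an assembled matrix `A` (error checking omitted):
.vb
  PetscInt           rstart, rend, row, ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;

  MatGetOwnershipRange(A, &rstart, &rend);
  for (row = rstart; row < rend; row++) {
    MatGetRow(A, row, &ncols, &cols, &vals);
    /* examine cols[] and vals[] here */
    MatRestoreRow(A, row, &ncols, &cols, &vals);
  }
.ve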
556: .seealso: [](chapter_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
557: @*/
558: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
559: {
560: PetscInt incols;
562: PetscFunctionBegin;
565: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
566: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
567: MatCheckPreallocated(mat, 1);
568: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
569: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
570: PetscCall((*mat->ops->getrow)(mat, row, &incols, (PetscInt **)cols, (PetscScalar **)vals));
571: if (ncols) *ncols = incols;
572: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
573: PetscFunctionReturn(PETSC_SUCCESS);
574: }
576: /*@
577: MatConjugate - replaces the matrix values with their complex conjugates
579: Logically Collective
581: Input Parameter:
582: . mat - the matrix
584: Level: advanced
586: .seealso: [](chapter_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
587: @*/
588: PetscErrorCode MatConjugate(Mat mat)
589: {
590: PetscFunctionBegin;
592: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
593: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
594: PetscUseTypeMethod(mat, conjugate);
595: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
596: }
597: PetscFunctionReturn(PETSC_SUCCESS);
598: }
600: /*@C
601: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
603: Not Collective
605: Input Parameters:
606: + mat - the matrix
607: . row - the row to get
608: . ncols - the number of nonzeros
609: . cols - the columns of the nonzeros
610: - vals - if not `NULL`, the values of the nonzeros
612: Level: advanced
614: Notes:
615: This routine should be called after you have finished examining the entries.
617: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
618: use of the array after it has been restored. If you pass `NULL`, it will
619: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
621: Fortran Notes:
622: The calling sequence is
623: .vb
624: MatRestoreRow(matrix,row,ncols,cols,values,ierr)
625: Mat matrix (input)
626: integer row (input)
627: integer ncols (output)
628: integer cols(maxcols) (output)
629: double precision (or double complex) values(maxcols) output
630: .ve
631: Where maxcols >= maximum nonzeros in any row of the matrix.
633: In Fortran `MatRestoreRow()` MUST be called after `MatGetRow()`
634: before another call to `MatGetRow()` can be made.
636: .seealso: [](chapter_matrices), `Mat`, `MatGetRow()`
637: @*/
638: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
639: {
640: PetscFunctionBegin;
643: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
644: if (!mat->ops->restorerow) PetscFunctionReturn(PETSC_SUCCESS);
645: PetscCall((*mat->ops->restorerow)(mat, row, ncols, (PetscInt **)cols, (PetscScalar **)vals));
646: if (ncols) *ncols = 0;
647: if (cols) *cols = NULL;
648: if (vals) *vals = NULL;
649: PetscFunctionReturn(PETSC_SUCCESS);
650: }
652: /*@
653: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
654: You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.
656: Not Collective
658: Input Parameter:
659: . mat - the matrix
661: Level: advanced
663: Note:
664: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for matrices in `MATSBAIJ` format.
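Example Usage:
A sketch for a `MATSBAIJ` matrix `A` (error checking omitted; `row`, `ncols`, `cols`, and `vals` as in `MatGetRow()`):
.vb
  MatGetRowUpperTriangular(A);
  MatGetRow(A, row, &ncols, &cols, &vals); /* returns only the upper triangular part of the row */
  MatRestoreRow(A, row, &ncols, &cols, &vals);
  MatRestoreRowUpperTriangular(A);
.ve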
666: .seealso: [](chapter_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
667: @*/
668: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
669: {
670: PetscFunctionBegin;
673: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
674: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
675: MatCheckPreallocated(mat, 1);
676: if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
677: PetscUseTypeMethod(mat, getrowuppertriangular);
678: PetscFunctionReturn(PETSC_SUCCESS);
679: }
681: /*@
682: MatRestoreRowUpperTriangular - Disables calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
684: Not Collective
686: Input Parameter:
687: . mat - the matrix
689: Level: advanced
691: Note:
692: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
694: .seealso: [](chapter_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
695: @*/
696: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
697: {
698: PetscFunctionBegin;
701: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
702: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
703: MatCheckPreallocated(mat, 1);
704: if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
705: PetscUseTypeMethod(mat, restorerowuppertriangular);
706: PetscFunctionReturn(PETSC_SUCCESS);
707: }
709: /*@C
710: MatSetOptionsPrefix - Sets the prefix used for searching for all
711: `Mat` options in the database.
713: Logically Collective
715: Input Parameters:
716: + A - the matrix
717: - prefix - the prefix to prepend to all option names
719: Level: advanced
721: Notes:
722: A hyphen (-) must NOT be given at the beginning of the prefix name.
723: The first character of all runtime options is AUTOMATICALLY the hyphen.
725: This is NOT used for options for the factorization of the matrix. Normally the
726: prefix is automatically passed in from the `PC` calling the factorization. To set
727: it directly use `MatSetOptionsPrefixFactor()`.
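Example Usage:
A sketch; the prefix "a_" is illustrative:
.vb
  MatSetOptionsPrefix(A, "a_");
  MatSetFromOptions(A);
.ve
The matrix type can then be selected at runtime with, for example, -a_mat_type aij.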
729: .seealso: [](chapter_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
730: @*/
731: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
732: {
733: PetscFunctionBegin;
735: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
736: PetscFunctionReturn(PETSC_SUCCESS);
737: }
739: /*@C
740: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database
741: for matrices created with `MatGetFactor()`
743: Logically Collective
745: Input Parameters:
746: + A - the matrix
747: - prefix - the prefix to prepend to all option names for the factored matrix
749: Level: developer
751: Notes:
752: A hyphen (-) must NOT be given at the beginning of the prefix name.
753: The first character of all runtime options is AUTOMATICALLY the hyphen.
755: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
756: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
758: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
759: @*/
760: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
761: {
762: PetscFunctionBegin;
764: if (prefix) {
766: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
767: if (prefix != A->factorprefix) {
768: PetscCall(PetscFree(A->factorprefix));
769: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
770: }
771: } else PetscCall(PetscFree(A->factorprefix));
772: PetscFunctionReturn(PETSC_SUCCESS);
773: }
775: /*@C
776: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database
777: for matrices created with `MatGetFactor()`
779: Logically Collective
781: Input Parameters:
782: + A - the matrix
783: - prefix - the prefix to prepend to all option names for the factored matrix
785: Level: developer
787: Notes:
788: A hyphen (-) must NOT be given at the beginning of the prefix name.
789: The first character of all runtime options is AUTOMATICALLY the hyphen.
791: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
792: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
794: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
795: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
796: `MatSetOptionsPrefix()`
797: @*/
798: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
799: {
800: size_t len1, len2, new_len;
802: PetscFunctionBegin;
804: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
805: if (!A->factorprefix) {
806: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
807: PetscFunctionReturn(PETSC_SUCCESS);
808: }
809: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
811: PetscCall(PetscStrlen(A->factorprefix, &len1));
812: PetscCall(PetscStrlen(prefix, &len2));
813: new_len = len1 + len2 + 1;
814: PetscCall(PetscRealloc(new_len * sizeof(*(A->factorprefix)), &A->factorprefix));
815: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
816: PetscFunctionReturn(PETSC_SUCCESS);
817: }
819: /*@C
820: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
821: matrix options in the database.
823: Logically Collective
825: Input Parameters:
826: + A - the matrix
827: - prefix - the prefix to prepend to all option names
829: Level: advanced
831: Note:
832: A hyphen (-) must NOT be given at the beginning of the prefix name.
833: The first character of all runtime options is AUTOMATICALLY the hyphen.
835: .seealso: [](chapter_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
836: @*/
837: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
838: {
839: PetscFunctionBegin;
841: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
842: PetscFunctionReturn(PETSC_SUCCESS);
843: }
845: /*@C
846: MatGetOptionsPrefix - Gets the prefix used for searching for all
847: matrix options in the database.
849: Not Collective
851: Input Parameter:
852: . A - the matrix
854: Output Parameter:
855: . prefix - pointer to the prefix string used
857: Level: advanced
859: Fortran Note:
860: The user should pass in a string `prefix` of
861: sufficient length to hold the prefix.
863: .seealso: [](chapter_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
864: @*/
865: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
866: {
867: PetscFunctionBegin;
870: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
871: PetscFunctionReturn(PETSC_SUCCESS);
872: }
874: /*@
875: MatResetPreallocation - Resets the matrix to use the original nonzero pattern provided by the user.
877: Collective
879: Input Parameter:
880: . A - the matrix
882: Level: beginner
884: Notes:
885: The allocated memory will be shrunk after calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
887: Users can reset the preallocation to access the original memory.
889: Currently only supported for `MATAIJ` matrices.
891: .seealso: [](chapter_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
892: @*/
893: PetscErrorCode MatResetPreallocation(Mat A)
894: {
895: PetscFunctionBegin;
898: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
899: PetscFunctionReturn(PETSC_SUCCESS);
900: }
902: /*@
903: MatSetUp - Sets up the internal matrix data structures for later use.
905: Collective
907: Input Parameter:
908: . A - the matrix
910: Level: intermediate
912: Notes:
913: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
914: setting values in the matrix.
916: If a suitable preallocation routine is used, this function does not need to be called.
918: This routine is called internally by other matrix functions when needed, so it rarely needs to be called by users.
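Example Usage:
A typical creation sequence might look like the following sketch (error checking omitted; `M` and `N` denote the global sizes):
.vb
  Mat A;

  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, M, N);
  MatSetFromOptions(A);
  MatSetUp(A); /* or a preallocation routine such as MatXAIJSetPreallocation() */
  /* MatSetValues(), MatAssemblyBegin()/MatAssemblyEnd(), ... */
.ve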
920: .seealso: [](chapter_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
921: @*/
922: PetscErrorCode MatSetUp(Mat A)
923: {
924: PetscFunctionBegin;
926: if (!((PetscObject)A)->type_name) {
927: PetscMPIInt size;
929: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
930: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
931: }
932: if (!A->preallocated) PetscTryTypeMethod(A, setup);
933: PetscCall(PetscLayoutSetUp(A->rmap));
934: PetscCall(PetscLayoutSetUp(A->cmap));
935: A->preallocated = PETSC_TRUE;
936: PetscFunctionReturn(PETSC_SUCCESS);
937: }
939: #if defined(PETSC_HAVE_SAWS)
940: #include <petscviewersaws.h>
941: #endif
943: /*@C
944: MatViewFromOptions - View properties of the matrix based on options set in the options database
946: Collective
948: Input Parameters:
949: + A - the matrix
950: . obj - optional additional object that provides the options prefix to use
951: - name - command line option
953: Options Database Key:
954: . -mat_view [viewertype]:... - the viewer and its options
956: Level: intermediate
958: Notes:
959: .vb
960: If no value is provided ascii:stdout is used
961: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
962: for example ascii::ascii_info prints just the information about the object not all details
963: unless :append is given filename opens in write mode, overwriting what was already there
964: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
965: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
966: socket[:port] defaults to the standard output port
967: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
968: .ve
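Example Usage:
A sketch; the option name -my_mat_view is illustrative:
.vb
  MatViewFromOptions(A, NULL, "-my_mat_view");
.ve
Running with, for example, -my_mat_view ascii:A.txt then writes the matrix to the file A.txt.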
970: .seealso: [](chapter_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
971: @*/
972: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
973: {
974: PetscFunctionBegin;
976: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
977: PetscFunctionReturn(PETSC_SUCCESS);
978: }
980: /*@C
981: MatView - Displays information about a matrix in a variety of ways
983: Collective
985: Input Parameters:
986: + mat - the matrix
987: - viewer - visualization context
989: Options Database Keys:
990: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
991: . -mat_view ::ascii_info_detail - Prints more detailed info
992: . -mat_view - Prints matrix in ASCII format
993: . -mat_view ::ascii_matlab - Prints matrix in Matlab format
994: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
995: . -display <name> - Sets display name (default is host)
996: . -draw_pause <sec> - Sets number of seconds to pause after display
997: . -mat_view socket - Sends matrix to socket, can be accessed from Matlab (see Users-Manual: ch_matlab for details)
998: . -viewer_socket_machine <machine> -
999: . -viewer_socket_port <port> -
1000: . -mat_view binary - save matrix to file in binary format
1001: - -viewer_binary_filename <name> -
1003: Level: beginner
1005: Notes:
1006: The available visualization contexts include
1007: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1008: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1009: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1010: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1012: The user can open alternative visualization contexts with
1013: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1014: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a
1015: specified file; corresponding input uses MatLoad()
1016: . `PetscViewerDrawOpen()` - Outputs nonzero matrix structure to
1017: an X window display
1018: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer.
1019: Currently only the sequential dense and AIJ
1020: matrix types support the Socket viewer.
1022: The user can call `PetscViewerPushFormat()` to specify the output
1023: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1024: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1025: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1026: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in Matlab format
1027: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1028: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse
1029: format common among all matrix types
1030: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific
1031: format (which is in many cases the same as the default)
1032: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix
1033: size and structure (not the matrix entries)
1034: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about
1035: the matrix structure
1037: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1038: for larger matrices the program will seemingly hang and take hours, so one should use the binary format instead.
1040: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1042: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1043: viewer is used.
1045: See share/petsc/matlab/PetscBinaryRead.m for a Matlab code that can read in the binary file when the binary
1046: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1048: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1049: and then use the following mouse functions.
1050: .vb
1051: left mouse: zoom in
1052: middle mouse: zoom out
1053: right mouse: continue with the simulation
1054: .ve
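Example Usage:
A sketch of writing a matrix in MATLAB format to a file (error checking omitted; the file name A.m is illustrative):
.vb
  PetscViewer viewer;

  PetscViewerASCIIOpen(PETSC_COMM_WORLD, "A.m", &viewer);
  PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
  MatView(A, viewer);
  PetscViewerPopFormat(viewer);
  PetscViewerDestroy(&viewer);
.ve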
1056: .seealso: [](chapter_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1057: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1058: @*/
1059: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1060: {
1061: PetscInt rows, cols, rbs, cbs;
1062: PetscBool isascii, isstring, issaws;
1063: PetscViewerFormat format;
1064: PetscMPIInt size;
1066: PetscFunctionBegin;
1069: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1071: PetscCheckSameComm(mat, 1, viewer, 2);
1073: PetscCall(PetscViewerGetFormat(viewer, &format));
1074: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
1075: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1077: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1078: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1079: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1080: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1082: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1083: if (isascii) {
1084: if (!mat->preallocated) {
1085: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1086: PetscFunctionReturn(PETSC_SUCCESS);
1087: }
1088: if (!mat->assembled) {
1089: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1090: PetscFunctionReturn(PETSC_SUCCESS);
1091: }
1092: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1093: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1094: MatNullSpace nullsp, transnullsp;
1096: PetscCall(PetscViewerASCIIPushTab(viewer));
1097: PetscCall(MatGetSize(mat, &rows, &cols));
1098: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1099: if (rbs != 1 || cbs != 1) {
1100: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "\n", rows, cols, rbs, cbs));
1101: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "\n", rows, cols, rbs));
1102: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1103: if (mat->factortype) {
1104: MatSolverType solver;
1105: PetscCall(MatFactorGetSolverType(mat, &solver));
1106: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1107: }
1108: if (mat->ops->getinfo) {
1109: MatInfo info;
1110: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1111: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1112: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1113: }
1114: PetscCall(MatGetNullSpace(mat, &nullsp));
1115: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1116: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1117: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1118: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1119: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1120: PetscCall(PetscViewerASCIIPushTab(viewer));
1121: PetscCall(MatProductView(mat, viewer));
1122: PetscCall(PetscViewerASCIIPopTab(viewer));
1123: }
1124: } else if (issaws) {
1125: #if defined(PETSC_HAVE_SAWS)
1126: PetscMPIInt rank;
1128: PetscCall(PetscObjectName((PetscObject)mat));
1129: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1130: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1131: #endif
1132: } else if (isstring) {
1133: const char *type;
1134: PetscCall(MatGetType(mat, &type));
1135: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1136: PetscTryTypeMethod(mat, view, viewer);
1137: }
1138: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1139: PetscCall(PetscViewerASCIIPushTab(viewer));
1140: PetscUseTypeMethod(mat, viewnative, viewer);
1141: PetscCall(PetscViewerASCIIPopTab(viewer));
1142: } else if (mat->ops->view) {
1143: PetscCall(PetscViewerASCIIPushTab(viewer));
1144: PetscUseTypeMethod(mat, view, viewer);
1145: PetscCall(PetscViewerASCIIPopTab(viewer));
1146: }
1147: if (isascii) {
1148: PetscCall(PetscViewerGetFormat(viewer, &format));
1149: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1150: }
1151: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1152: PetscFunctionReturn(PETSC_SUCCESS);
1153: }
1155: #if defined(PETSC_USE_DEBUG)
1156: #include <../src/sys/totalview/tv_data_display.h>
1157: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1158: {
1159: TV_add_row("Local rows", "int", &mat->rmap->n);
1160: TV_add_row("Local columns", "int", &mat->cmap->n);
1161: TV_add_row("Global rows", "int", &mat->rmap->N);
1162: TV_add_row("Global columns", "int", &mat->cmap->N);
1163: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1164: return TV_format_OK;
1165: }
1166: #endif
1168: /*@C
1169: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1170: with `MatView()`. The matrix format is determined from the options database.
1171: Generates a parallel MPI matrix if the communicator has more than one
1172: processor. The default matrix type is `MATAIJ`.
1174: Collective
1176: Input Parameters:
1177: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1178: or some related function before a call to `MatLoad()`
1179: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1181: Options Database Keys:
1182: Used with block matrix formats (`MATSEQBAIJ`, ...) to specify
1183: block size
1184: . -matload_block_size <bs> - set block size
1186: Level: beginner
1188: Notes:
1189: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1190: `Mat` before calling this routine if you wish to set it from the options database.
1192: `MatLoad()` automatically loads into the options database any options
1193: given in the file filename.info where filename is the name of the file
1194: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1195: file will be ignored if you use the -viewer_binary_skip_info option.
1197: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1198: sets the default matrix type AIJ and sets the local and global sizes.
1199: If type and/or size is already set, then the same are used.
1201: In parallel, each processor can load a subset of rows (or the
1202: entire matrix). This routine is especially useful when a large
1203: matrix is stored on disk and only part of it is desired on each
1204: processor. For example, a parallel solver may access only some of
1205: the rows from each processor. The algorithm used here reads
1206: relatively small blocks of data rather than reading the entire
1207: matrix and then subsetting it.
1209: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1210: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1211: or the sequence like
1212: .vb
1213: `PetscViewer` v;
1214: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1215: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1216: `PetscViewerSetFromOptions`(v);
1217: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1218: `PetscViewerFileSetName`(v,"datafile");
1219: .ve
1220: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1221: $ -viewer_type {binary,hdf5}
1223: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1224: and src/mat/tutorials/ex10.c with the second approach.
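For instance, a minimal binary load might look like the following sketch (error checking omitted; the file name matrix.dat is illustrative):
.vb
  Mat         A;
  PetscViewer viewer;

  PetscViewerBinaryOpen(PETSC_COMM_WORLD, "matrix.dat", FILE_MODE_READ, &viewer);
  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetFromOptions(A);
  MatLoad(A, viewer);
  PetscViewerDestroy(&viewer);
.ve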
1226: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1227: is read onto rank 0 and then shipped to its destination rank, one after another.
1228: Multiple objects, both matrices and vectors, can be stored within the same file.
1229: Their PetscObject name is ignored; they are loaded in the order of their storage.
1231: Most users should not need to know the details of the binary storage
1232: format, since `MatLoad()` and `MatView()` completely hide these details.
1233: But for anyone who's interested, the standard binary matrix storage
1234: format is
1236: .vb
1237: PetscInt MAT_FILE_CLASSID
1238: PetscInt number of rows
1239: PetscInt number of columns
1240: PetscInt total number of nonzeros
1241: PetscInt *number nonzeros in each row
1242: PetscInt *column indices of all nonzeros (starting index is zero)
1243: PetscScalar *values of all nonzeros
1244: .ve
1246: PETSc automatically does the byte swapping for
1247: machines that store the bytes reversed. Thus if you write your own binary
1248: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1249: and `PetscBinaryWrite()` to see how this may be done.
1251: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1252: Each processor's chunk is loaded independently by its owning rank.
1253: Multiple objects, both matrices and vectors, can be stored within the same file.
1254: They are looked up by their PetscObject name.
1256: As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1257: by default the same structure and naming of the AIJ arrays and column count
1258: within the HDF5 file. This means that a MAT file saved with the -v7.3 flag, e.g.
1259: $ save example.mat A b -v7.3
1260: can be directly read by this routine (see Reference 1 for details).
1262: Depending on your MATLAB version, this format might be a default,
1263: otherwise you can set it as default in Preferences.
1265: Unless the -nocompression flag is used to save the file in MATLAB,
1266: PETSc must be configured with the ZLIB package.
1268: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1270: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1272: Corresponding `MatView()` is not yet implemented.
1274: The loaded matrix is actually a transpose of the original one in MATLAB,
1275: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1276: With this format, the matrix is automatically transposed by PETSc,
1277: unless the matrix is marked as SPD or symmetric
1278: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1280: References:
1281: . * - MATLAB(R) Documentation, manual page of save(), https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version
1283: .seealso: [](chapter_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1284: @*/
1285: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1286: {
1287: PetscBool flg;
1289: PetscFunctionBegin;
1293: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1295: flg = PETSC_FALSE;
1296: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1297: if (flg) {
1298: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1299: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1300: }
1301: flg = PETSC_FALSE;
1302: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1303: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1305: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1306: PetscUseTypeMethod(mat, load, viewer);
1307: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1308: PetscFunctionReturn(PETSC_SUCCESS);
1309: }
1311: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1312: {
1313: Mat_Redundant *redund = *redundant;
1315: PetscFunctionBegin;
1316: if (redund) {
1317: if (redund->matseq) { /* via MatCreateSubMatrices() */
1318: PetscCall(ISDestroy(&redund->isrow));
1319: PetscCall(ISDestroy(&redund->iscol));
1320: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1321: } else {
1322: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1323: PetscCall(PetscFree(redund->sbuf_j));
1324: PetscCall(PetscFree(redund->sbuf_a));
1325: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1326: PetscCall(PetscFree(redund->rbuf_j[i]));
1327: PetscCall(PetscFree(redund->rbuf_a[i]));
1328: }
1329: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1330: }
1332: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1333: PetscCall(PetscFree(redund));
1334: }
1335: PetscFunctionReturn(PETSC_SUCCESS);
1336: }
1338: /*@C
1339: MatDestroy - Frees space taken by a matrix.
1341: Collective
1343: Input Parameter:
1344: . A - the matrix
1346: Level: beginner
1348: Developer Note:
1349: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1350: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1351: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1352: if changes are needed here.
1354: .seealso: [](chapter_matrices), `Mat`, `MatCreate()`
1355: @*/
1356: PetscErrorCode MatDestroy(Mat *A)
1357: {
1358: PetscFunctionBegin;
1359: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1361: if (--((PetscObject)(*A))->refct > 0) {
1362: *A = NULL;
1363: PetscFunctionReturn(PETSC_SUCCESS);
1364: }
1366: /* if memory was published with SAWs then destroy it */
1367: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1368: PetscTryTypeMethod((*A), destroy);
1370: PetscCall(PetscFree((*A)->factorprefix));
1371: PetscCall(PetscFree((*A)->defaultvectype));
1372: PetscCall(PetscFree((*A)->defaultrandtype));
1373: PetscCall(PetscFree((*A)->bsizes));
1374: PetscCall(PetscFree((*A)->solvertype));
1375: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1376: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1377: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1378: PetscCall(MatProductClear(*A));
1379: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1380: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1381: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1382: PetscCall(MatDestroy(&(*A)->schur));
1383: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1384: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1385: PetscCall(PetscHeaderDestroy(A));
1386: PetscFunctionReturn(PETSC_SUCCESS);
1387: }
1389: /*@C
1390: MatSetValues - Inserts or adds a block of values into a matrix.
1391: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1392: MUST be called after all calls to `MatSetValues()` have been completed.
1394: Not Collective
1396: Input Parameters:
1397: + mat - the matrix
1398: . v - a logically two-dimensional array of values
1399: . m - the number of rows
1400: . idxm - the global indices of the rows
1401: . n - the number of columns
1402: . idxn - the global indices of the columns
1403: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1405: Level: beginner
1407: Notes:
1408: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1410: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1411: options cannot be mixed without intervening calls to the assembly
1412: routines.
1414: `MatSetValues()` uses 0-based row and column numbers in Fortran
1415: as well as in C.
1417: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1418: simply ignored. This allows easily inserting element stiffness matrices
1419: with homogeneous Dirichlet boundary conditions that you don't want represented
1420: in the matrix.
1422: Efficiency Alert:
1423: The routine `MatSetValuesBlocked()` may offer much better efficiency
1424: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1426: Developer Note:
1427: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1428: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
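Example Usage:
A sketch of inserting a 2 by 2 block of values into rows 0,1 and columns 2,3 of a matrix `A` (error checking omitted):
.vb
  PetscInt    idxm[2] = {0, 1}, idxn[2] = {2, 3};
  PetscScalar v[4]    = {1.0, 2.0, 3.0, 4.0}; /* row-oriented: v[i*n+j] is placed at (idxm[i], idxn[j]) */

  MatSetValues(A, 2, idxm, 2, idxn, v, INSERT_VALUES);
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
.ve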
1430: .seealso: [](chapter_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1431: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1432: @*/
1433: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1434: {
1435: PetscFunctionBeginHot;
1438: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1441: MatCheckPreallocated(mat, 1);
1443: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1444: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1446: if (PetscDefined(USE_DEBUG)) {
1447: PetscInt i, j;
1449: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1450: for (i = 0; i < m; i++) {
1451: for (j = 0; j < n; j++) {
1452: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1453: #if defined(PETSC_USE_COMPLEX)
1454: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1455: #else
1456: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1457: #endif
1458: }
1459: }
1460: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1461: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1462: }
1464: if (mat->assembled) {
1465: mat->was_assembled = PETSC_TRUE;
1466: mat->assembled = PETSC_FALSE;
1467: }
1468: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1469: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1470: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1471: PetscFunctionReturn(PETSC_SUCCESS);
1472: }
1474: /*@C
1475: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns.
1476: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1477: MUST be called after all calls to `MatSetValuesIS()` have been completed.
1479: Not Collective
1481: Input Parameters:
1482: + mat - the matrix
1483: . ism - the index set of rows to provide
1484: . isn - the index set of columns to provide
1485: . v - a logically two-dimensional array of values
1486: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1488: Level: beginner
1490: Notes:
1491: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1493: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1494: options cannot be mixed without intervening calls to the assembly
1495: routines.
1497: `MatSetValues()` uses 0-based row and column numbers in Fortran
1498: as well as in C.
1500: Negative indices may be passed in `ism` and `isn`; these rows and columns are
1501: simply ignored. This allows easily inserting element stiffness matrices
1502: with homogeneous Dirichlet boundary conditions that you don't want represented
1503: in the matrix.
1505: Efficiency Alert:
1506: The routine `MatSetValuesBlocked()` may offer much better efficiency
1507: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1509: This is currently not optimized for any particular `ISType`.
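Example:
A minimal sketch; `mat` is assumed to be created and preallocated, and the index sets (selecting rows 0,2 and columns 1,3) as well as the 2x2 block of values are illustrative:
.vb
IS          rows, cols;
PetscInt    ridx[2] = {0, 2}, cidx[2] = {1, 3};
PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0}; /* row-oriented: entries (0,1) (0,3) (2,1) (2,3) */

PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, ridx, PETSC_COPY_VALUES, &rows));
PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, cidx, PETSC_COPY_VALUES, &cols));
PetscCall(MatSetValuesIS(mat, rows, cols, vals, ADD_VALUES));
PetscCall(ISDestroy(&rows));
PetscCall(ISDestroy(&cols));
.ve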
1511: Developer Notes:
1512: This is labeled with C so it does not automatically generate Fortran stubs and interfaces
1513: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1515: .seealso: [](chapter_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1516: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`
1517: @*/
1518: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1519: {
1520: PetscInt m, n;
1521: const PetscInt *rows, *cols;
1523: PetscFunctionBeginHot;
1525: PetscCall(ISGetIndices(ism, &rows));
1526: PetscCall(ISGetIndices(isn, &cols));
1527: PetscCall(ISGetLocalSize(ism, &m));
1528: PetscCall(ISGetLocalSize(isn, &n));
1529: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1530: PetscCall(ISRestoreIndices(ism, &rows));
1531: PetscCall(ISRestoreIndices(isn, &cols));
1532: PetscFunctionReturn(PETSC_SUCCESS);
1533: }
1535: /*@
1536: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1537: values into a matrix
1539: Not Collective
1541: Input Parameters:
1542: + mat - the matrix
1543: . row - the (block) row to set
1544: - v - a logically two-dimensional array of values
1546: Level: intermediate
1548: Notes:
1549: The values, `v`, are column-oriented (for the block version) and sorted
1551: All the nonzeros in the row must be provided
1553: The matrix must have previously had its column indices set, likely by having been assembled.
1555: The row must belong to this process
1557: .seealso: [](chapter_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1558: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1559: @*/
1560: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1561: {
1562: PetscInt globalrow;
1564: PetscFunctionBegin;
1568: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1569: PetscCall(MatSetValuesRow(mat, globalrow, v));
1570: PetscFunctionReturn(PETSC_SUCCESS);
1571: }
1573: /*@
1574: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1575: values into a matrix
1577: Not Collective
1579: Input Parameters:
1580: + mat - the matrix
1581: . row - the (block) row to set
1582: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1584: Level: advanced
1586: Notes:
1587: The values, `v`, are column-oriented for the block version.
1589: All the nonzeros in the row must be provided
1591: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED, usually `MatSetValues()` is used.
1593: The row must belong to this process
1595: .seealso: [](chapter_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1596: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`
1597: @*/
1598: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1599: {
1600: PetscFunctionBeginHot;
1603: MatCheckPreallocated(mat, 1);
1605: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1606: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1607: mat->insertmode = INSERT_VALUES;
1609: if (mat->assembled) {
1610: mat->was_assembled = PETSC_TRUE;
1611: mat->assembled = PETSC_FALSE;
1612: }
1613: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1614: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1615: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1616: PetscFunctionReturn(PETSC_SUCCESS);
1617: }
1619: /*@
1620: MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1621: using structured grid indexing
1623: Not Collective
1625: Input Parameters:
1626: + mat - the matrix
1627: . m - number of rows being entered
1628: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1629: . n - number of columns being entered
1630: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1631: . v - a logically two-dimensional array of values
1632: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1634: Level: beginner
1636: Notes:
1637: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1639: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1640: options cannot be mixed without intervening calls to the assembly
1641: routines.
1643: The grid coordinates are across the entire grid, not just the local portion
1645: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1646: as well as in C.
1648: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1650: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1651: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1653: The columns and rows in the stencil passed in MUST be contained within the
1654: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1655: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1656: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1657: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1659: For periodic boundary conditions use negative indices for values to the left of the first entry (below 0); these are
1660: obtained by wrapping around from the right edge. Similarly, for values to the right of the last entry use that index plus one,
1661: etc., to obtain values wrapped around from the left edge. This only works for the
1662: `DM_BOUNDARY_PERIODIC` boundary type.
1664: For indices that don't mean anything for your case (like the k index when working in 2d), or the c index when you have
1665: a single value per point, you can skip filling those indices.
1667: Inspired by the structured grid interface to the HYPRE package
1668: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1670: Efficiency Alert:
1671: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1672: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1674: Fortran Note:
1675: `idxm` and `idxn` should be declared as
1676: $ MatStencil idxm(4,m),idxn(4,n)
1677: and the values inserted using
1678: .vb
1679: idxm(MatStencil_i,1) = i
1680: idxm(MatStencil_j,1) = j
1681: idxm(MatStencil_k,1) = k
1682: idxm(MatStencil_c,1) = c
1683: etc
1684: .ve
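Example:
A minimal sketch of inserting a 5-point stencil row in 2d; `mat` is assumed to have been obtained with `DMCreateMatrix()` from a `DMDA` with dof = 1, the grid point (i,j) = (3,4) is assumed to be owned by this process and away from the boundary, and the coefficients are illustrative:
.vb
PetscInt    i = 3, j = 4;
MatStencil  row, col[5];
PetscScalar v[5] = {-1.0, -1.0, 4.0, -1.0, -1.0};

row.i = i;        row.j = j;
col[0].i = i;     col[0].j = j - 1;
col[1].i = i - 1; col[1].j = j;
col[2].i = i;     col[2].j = j;
col[3].i = i + 1; col[3].j = j;
col[4].i = i;     col[4].j = j + 1;
PetscCall(MatSetValuesStencil(mat, 1, &row, 5, col, v, INSERT_VALUES));
.ve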
1686: .seealso: [](chapter_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1687: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1688: @*/
1689: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1690: {
1691: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1692: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1693: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1695: PetscFunctionBegin;
1696: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1702: if ((m + n) <= (PetscInt)(sizeof(buf) / sizeof(PetscInt))) {
1703: jdxm = buf;
1704: jdxn = buf + m;
1705: } else {
1706: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1707: jdxm = bufm;
1708: jdxn = bufn;
1709: }
1710: for (i = 0; i < m; i++) {
1711: for (j = 0; j < 3 - sdim; j++) dxm++;
1712: tmp = *dxm++ - starts[0];
1713: for (j = 0; j < dim - 1; j++) {
1714: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1715: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1716: }
1717: if (mat->stencil.noc) dxm++;
1718: jdxm[i] = tmp;
1719: }
1720: for (i = 0; i < n; i++) {
1721: for (j = 0; j < 3 - sdim; j++) dxn++;
1722: tmp = *dxn++ - starts[0];
1723: for (j = 0; j < dim - 1; j++) {
1724: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1725: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1726: }
1727: if (mat->stencil.noc) dxn++;
1728: jdxn[i] = tmp;
1729: }
1730: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1731: PetscCall(PetscFree2(bufm, bufn));
1732: PetscFunctionReturn(PETSC_SUCCESS);
1733: }
1735: /*@
1736: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1737: using structured grid indexing
1739: Not Collective
1741: Input Parameters:
1742: + mat - the matrix
1743: . m - number of rows being entered
1744: . idxm - grid coordinates for matrix rows being entered
1745: . n - number of columns being entered
1746: . idxn - grid coordinates for matrix columns being entered
1747: . v - a logically two-dimensional array of values
1748: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1750: Level: beginner
1752: Notes:
1753: By default the values, `v`, are row-oriented and unsorted.
1754: See `MatSetOption()` for other options.
1756: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1757: options cannot be mixed without intervening calls to the assembly
1758: routines.
1760: The grid coordinates are across the entire grid, not just the local portion
1762: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1763: as well as in C.
1765: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1767: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1768: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1770: The columns and rows in the stencil passed in MUST be contained within the
1771: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1772: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1773: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1774: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1776: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1777: simply ignored. This allows easily inserting element stiffness matrices
1778: with homogeneous Dirichlet boundary conditions that you don't want represented
1779: in the matrix.
1781: Inspired by the structured grid interface to the HYPRE package
1782: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1784: Fortran Note:
1785: `idxm` and `idxn` should be declared as
1786: $ MatStencil idxm(4,m),idxn(4,n)
1787: and the values inserted using
1788: .vb
1789: idxm(MatStencil_i,1) = i
1790: idxm(MatStencil_j,1) = j
1791: idxm(MatStencil_k,1) = k
1792: etc
1793: .ve
1795: .seealso: [](chapter_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1796: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1797: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1798: @*/
1799: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1800: {
1801: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1802: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1803: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1805: PetscFunctionBegin;
1806: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1813: if ((m + n) <= (PetscInt)(sizeof(buf) / sizeof(PetscInt))) {
1814: jdxm = buf;
1815: jdxn = buf + m;
1816: } else {
1817: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1818: jdxm = bufm;
1819: jdxn = bufn;
1820: }
1821: for (i = 0; i < m; i++) {
1822: for (j = 0; j < 3 - sdim; j++) dxm++;
1823: tmp = *dxm++ - starts[0];
1824: for (j = 0; j < sdim - 1; j++) {
1825: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1826: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1827: }
1828: dxm++;
1829: jdxm[i] = tmp;
1830: }
1831: for (i = 0; i < n; i++) {
1832: for (j = 0; j < 3 - sdim; j++) dxn++;
1833: tmp = *dxn++ - starts[0];
1834: for (j = 0; j < sdim - 1; j++) {
1835: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1836: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1837: }
1838: dxn++;
1839: jdxn[i] = tmp;
1840: }
1841: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1842: PetscCall(PetscFree2(bufm, bufn));
1843: PetscFunctionReturn(PETSC_SUCCESS);
1844: }
1846: /*@
1847: MatSetStencil - Sets the grid information for setting values into a matrix via
1848: `MatSetValuesStencil()`
1850: Not Collective
1852: Input Parameters:
1853: + mat - the matrix
1854: . dim - dimension of the grid (1, 2, or 3)
1855: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1856: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1857: - dof - number of degrees of freedom per node
1859: Level: beginner
1861: Notes:
1862: Inspired by the structured grid interface to the HYPRE package
1863: (www.llnl.gov/CASC/hypre)
1865: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1866: user.
1868: .seealso: [](chapter_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1869: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1870: @*/
1871: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1872: {
1873: PetscFunctionBegin;
1878: mat->stencil.dim = dim + (dof > 1);
1879: for (PetscInt i = 0; i < dim; i++) {
1880: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1881: mat->stencil.starts[i] = starts[dim - i - 1];
1882: }
1883: mat->stencil.dims[dim] = dof;
1884: mat->stencil.starts[dim] = 0;
1885: mat->stencil.noc = (PetscBool)(dof == 1);
1886: PetscFunctionReturn(PETSC_SUCCESS);
1887: }
1889: /*@C
1890: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1892: Not Collective
1894: Input Parameters:
1895: + mat - the matrix
1896: . m - the number of block rows
1897: . idxm - the global block row indices
1898: . n - the number of block columns
1899: . idxn - the global block column indices
1900: . v - a logically two-dimensional array of values
1901: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1903: Level: intermediate
1905: Notes:
1906: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
1907: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
1909: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
1910: NOT the total number of rows/columns; for example, if the block size is 2 and
1911: you are passing in values for rows 2,3,4,5 then m would be 2 (not 4).
1912: The values in `idxm` would be 1 2; that is, the first index of each block divided by
1913: the block size.
1915: You must call `MatSetBlockSize()` when constructing this matrix (before
1916: preallocating it).
1918: By default the values, `v`, are row-oriented, so the layout of
1919: `v` is the same as for `MatSetValues()`. See `MatSetOption()` for other options.
1921: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
1922: options cannot be mixed without intervening calls to the assembly
1923: routines.
1925: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
1926: as well as in C.
1928: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1929: simply ignored. This allows easily inserting element stiffness matrices
1930: with homogeneous Dirichlet boundary conditions that you don't want represented
1931: in the matrix.
1933: Each time an entry is set within a sparse matrix via `MatSetValues()`,
1934: internal searching must be done to determine where to place the
1935: data in the matrix storage space. By instead inserting blocks of
1936: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
1937: reduced.
1939: Example:
1940: .vb
1941: Suppose m=n=2 and block size (bs) = 2. The array is
1943: 1 2 | 3 4
1944: 5 6 | 7 8
1945: - - - | - - -
1946: 9 10 | 11 12
1947: 13 14 | 15 16
1949: v[] should be passed in like
1950: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
1952: If you are not using row oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
1953: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
1954: .ve
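A call producing the layout above might look like the following minimal sketch; the matrix is assumed to have block size 2 and to be preallocated, and the block indices are illustrative:
.vb
PetscInt    idx[2] = {0, 1}; /* block rows/columns 0 and 1, i.e. scalar rows/columns 0-3 */
PetscScalar v[16]  = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

PetscCall(MatSetValuesBlocked(mat, 2, idx, 2, idx, v, INSERT_VALUES));
.ve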
1956: .seealso: [](chapter_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
1957: @*/
1958: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1959: {
1960: PetscFunctionBeginHot;
1963: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1966: MatCheckPreallocated(mat, 1);
1967: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1968: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1969: if (PetscDefined(USE_DEBUG)) {
1970: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1971: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
1972: }
1973: if (PetscDefined(USE_DEBUG)) {
1974: PetscInt rbs, cbs, M, N, i;
1975: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1976: PetscCall(MatGetSize(mat, &M, &N));
1977: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block index %" PetscInt_FMT " (index %" PetscInt_FMT ") greater than row length %" PetscInt_FMT, i, idxm[i], M);
1978: for (i = 0; i < n; i++) PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block index %" PetscInt_FMT " (index %" PetscInt_FMT ") greater than column length %" PetscInt_FMT, i, idxn[i], N);
1979: }
1980: if (mat->assembled) {
1981: mat->was_assembled = PETSC_TRUE;
1982: mat->assembled = PETSC_FALSE;
1983: }
1984: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1985: if (mat->ops->setvaluesblocked) {
1986: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
1987: } else {
1988: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
1989: PetscInt i, j, bs, cbs;
1991: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
1992: if (m * bs + n * cbs <= (PetscInt)(sizeof(buf) / sizeof(PetscInt))) {
1993: iidxm = buf;
1994: iidxn = buf + m * bs;
1995: } else {
1996: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
1997: iidxm = bufr;
1998: iidxn = bufc;
1999: }
2000: for (i = 0; i < m; i++) {
2001: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2002: }
2003: if (m != n || bs != cbs || idxm != idxn) {
2004: for (i = 0; i < n; i++) {
2005: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2006: }
2007: } else iidxn = iidxm;
2008: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2009: PetscCall(PetscFree2(bufr, bufc));
2010: }
2011: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2012: PetscFunctionReturn(PETSC_SUCCESS);
2013: }
2015: /*@C
2016: MatGetValues - Gets a block of local values from a matrix.
2018: Not Collective; can only return values that are owned by the given process
2020: Input Parameters:
2021: + mat - the matrix
2022: . m - the number of rows
2023: . idxm - the global indices of the rows
2024: . n - the number of columns
2025: - idxn - the global indices of the columns
2026: Output Parameter:
2027: . v - a logically two-dimensional array for storing the values
2028: Level: advanced
2030: Notes:
2031: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2032: The values, `v`, are then returned in a row-oriented format,
2033: analogous to that used by default in `MatSetValues()`.
2035: `MatGetValues()` uses 0-based row and column numbers in
2036: Fortran as well as in C.
2038: `MatGetValues()` requires that the matrix has been assembled
2039: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2040: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2041: without intermediate matrix assembly.
2043: Negative row or column indices will be ignored and those locations in `v` will be
2044: left unchanged.
2046: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI rank.
2047: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2048: from `MatGetOwnershipRange`(mat,&rstart,&rend).
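Example:
A minimal sketch retrieving a 2x2 set of entries from an assembled matrix; it is assumed that this MPI rank owns at least two rows and that the matrix has at least two columns:
.vb
PetscInt    rstart, rend, rows[2], cols[2] = {0, 1};
PetscScalar vals[4];

PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
rows[0] = rstart;
rows[1] = rstart + 1;
PetscCall(MatGetValues(mat, 2, rows, 2, cols, vals)); /* vals is filled row by row */
.ve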
2050: .seealso: [](chapter_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2051: @*/
2052: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2053: {
2054: PetscFunctionBegin;
2057: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2061: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2062: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2063: MatCheckPreallocated(mat, 1);
2065: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2066: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2067: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2068: PetscFunctionReturn(PETSC_SUCCESS);
2069: }
2071: /*@C
2072: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2073: defined previously by `MatSetLocalToGlobalMapping()`
2075: Not Collective
2077: Input Parameters:
2078: + mat - the matrix
2079: . nrow - number of rows
2080: . irow - the row local indices
2081: . ncol - number of columns
2082: - icol - the column local indices
2084: Output Parameter:
2085: . y - a logically two-dimensional array of values
2087: Level: advanced
2089: Notes:
2090: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2092: This routine can only return values that are owned by the requesting MPI rank. That is, for standard matrix formats, rows that, in the global numbering,
2093: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2094: determine if the resulting global row associated with the local row r is owned by the requesting MPI rank by applying the `ISLocalToGlobalMapping` set
2095: with `MatSetLocalToGlobalMapping()`.
2097: Developer Note:
2098: This is labeled with C so it does not automatically generate Fortran stubs and interfaces
2099: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2101: .seealso: [](chapter_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2102: `MatSetValuesLocal()`, `MatGetValues()`
2103: @*/
2104: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2105: {
2106: PetscFunctionBeginHot;
2109: MatCheckPreallocated(mat, 1);
2110: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2113: if (PetscDefined(USE_DEBUG)) {
2114: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2115: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2116: }
2117: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2118: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2119: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2120: else {
2121: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2122: if ((nrow + ncol) <= (PetscInt)(sizeof(buf) / sizeof(PetscInt))) {
2123: irowm = buf;
2124: icolm = buf + nrow;
2125: } else {
2126: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2127: irowm = bufr;
2128: icolm = bufc;
2129: }
2130: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2131: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2132: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2133: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2134: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2135: PetscCall(PetscFree2(bufr, bufc));
2136: }
2137: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2138: PetscFunctionReturn(PETSC_SUCCESS);
2139: }
2141: /*@
2142: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2143: the same size. Currently, this can only be called once and creates the given matrix.
2145: Not Collective
2147: Input Parameters:
2148: + mat - the matrix
2149: . nb - the number of blocks
2150: . bs - the number of rows (and columns) in each block
2151: . rows - a concatenation of the rows for each block
2152: - v - a concatenation of logically two-dimensional arrays of values
2154: Level: advanced
2156: Note:
2157: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2159: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
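Example:
A minimal sketch adding two square 2x2 blocks; `mat` is assumed to be created and preallocated, and the indices and values are illustrative:
.vb
PetscInt    rows[4] = {0, 1, 2, 3};         /* rows of block 0, then rows of block 1 */
PetscScalar v[8]    = {1.0, 2.0, 3.0, 4.0,  /* block 0, row-oriented */
                       5.0, 6.0, 7.0, 8.0}; /* block 1, row-oriented */

PetscCall(MatSetValuesBatch(mat, 2, 2, rows, v));
.ve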
2161: .seealso: [](chapter_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2162: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2163: @*/
2164: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2165: {
2166: PetscFunctionBegin;
2171: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2173: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2174: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2175: else {
2176: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2177: }
2178: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2179: PetscFunctionReturn(PETSC_SUCCESS);
2180: }
2182: /*@
2183: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2184: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2185: using a local (per-processor) numbering.
2187: Not Collective
2189: Input Parameters:
2190: + x - the matrix
2191: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2192: - cmapping - column mapping
2194: Level: intermediate
2196: Note:
2197: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
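Example:
A minimal sketch that builds the simplest possible mapping, in which the local indices are just the locally owned rows with no ghost points, and uses it for both rows and columns:
.vb
ISLocalToGlobalMapping map;
PetscInt               rstart, rend, nloc, i, *idx;

PetscCall(MatGetOwnershipRange(x, &rstart, &rend));
nloc = rend - rstart;
PetscCall(PetscMalloc1(nloc, &idx));
for (i = 0; i < nloc; i++) idx[i] = rstart + i; /* local index i maps to global row rstart+i */
PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, nloc, idx, PETSC_OWN_POINTER, &map));
PetscCall(MatSetLocalToGlobalMapping(x, map, map));
PetscCall(ISLocalToGlobalMappingDestroy(&map));
.ve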
2199: .seealso: [](chapter_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2200: @*/
2201: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2202: {
2203: PetscFunctionBegin;
2208: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2209: else {
2210: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2211: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2212: }
2213: PetscFunctionReturn(PETSC_SUCCESS);
2214: }
2216: /*@
2217: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2219: Not Collective
2221: Input Parameter:
2222: . A - the matrix
2224: Output Parameters:
2225: + rmapping - row mapping
2226: - cmapping - column mapping
2228: Level: advanced
2230: .seealso: [](chapter_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2231: @*/
2232: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2233: {
2234: PetscFunctionBegin;
2237: if (rmapping) {
2239: *rmapping = A->rmap->mapping;
2240: }
2241: if (cmapping) {
2243: *cmapping = A->cmap->mapping;
2244: }
2245: PetscFunctionReturn(PETSC_SUCCESS);
2246: }
2248: /*@
2249: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2251: Logically Collective
2253: Input Parameters:
2254: + A - the matrix
2255: . rmap - row layout
2256: - cmap - column layout
2258: Level: advanced
2260: Note:
2261: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2263: .seealso: [](chapter_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2264: @*/
2265: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2266: {
2267: PetscFunctionBegin;
2269: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2270: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2271: PetscFunctionReturn(PETSC_SUCCESS);
2272: }
2274: /*@
2275: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2277: Not Collective
2279: Input Parameter:
2280: . A - the matrix
2282: Output Parameters:
2283: + rmap - row layout
2284: - cmap - column layout
2286: Level: advanced
2288: .seealso: [](chapter_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2289: @*/
2290: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2291: {
2292: PetscFunctionBegin;
2295: if (rmap) {
2297: *rmap = A->rmap;
2298: }
2299: if (cmap) {
2301: *cmap = A->cmap;
2302: }
2303: PetscFunctionReturn(PETSC_SUCCESS);
2304: }
2306: /*@C
2307: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2308: using a local numbering of the nodes.
2310: Not Collective
2312: Input Parameters:
2313: + mat - the matrix
2314: . nrow - number of rows
2315: . irow - the row local indices
2316: . ncol - number of columns
2317: . icol - the column local indices
2318: . y - a logically two-dimensional array of values
2319: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2321: Level: intermediate
2323: Notes:
2324: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call MatXXXXSetPreallocation() or
2325: `MatSetUp()` before using this routine
2327: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2329: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2330: options cannot be mixed without intervening calls to the assembly
2331: routines.
2333: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2334: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
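Example:
A minimal sketch; `mat` is assumed to be preallocated and to have had `MatSetLocalToGlobalMapping()` called on it, and the local indices and values are illustrative:
.vb
PetscInt    lrow = 0, lcols[2] = {0, 1};
PetscScalar vals[2] = {2.0, -1.0};

PetscCall(MatSetValuesLocal(mat, 1, &lrow, 2, lcols, vals, ADD_VALUES));
/* assemble once all values have been set */
PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve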
2336: Developer Note:
2337: This is labeled with C so it does not automatically generate Fortran stubs and interfaces
2338: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2340: .seealso: [](chapter_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2341: `MatGetValuesLocal()`
2342: @*/
2343: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2344: {
2345: PetscFunctionBeginHot;
2348: MatCheckPreallocated(mat, 1);
2349: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2352: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2353: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2354: if (PetscDefined(USE_DEBUG)) {
2355: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2356: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2357: }
2359: if (mat->assembled) {
2360: mat->was_assembled = PETSC_TRUE;
2361: mat->assembled = PETSC_FALSE;
2362: }
2363: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2364: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2365: else {
2366: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2367: const PetscInt *irowm, *icolm;
2369: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)(sizeof(buf) / sizeof(PetscInt))) {
2370: bufr = buf;
2371: bufc = buf + nrow;
2372: irowm = bufr;
2373: icolm = bufc;
2374: } else {
2375: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2376: irowm = bufr;
2377: icolm = bufc;
2378: }
2379: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2380: else irowm = irow;
2381: if (mat->cmap->mapping) {
2382: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2383: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2384: } else icolm = irowm;
2385: } else icolm = icol;
2386: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2387: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2388: }
2389: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2390: PetscFunctionReturn(PETSC_SUCCESS);
2391: }
2393: /*@C
2394: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2395: using a local ordering of the nodes a block at a time.
2397: Not Collective
2399: Input Parameters:
2400: + mat - the matrix
2401: . nrow - number of rows
2402: . irow - the row local indices
2403: . ncol - number of columns
2404: . icol - the column local indices
2405: . y - a logically two-dimensional array of values
2406: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2408: Level: intermediate
2410: Notes:
2411: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call MatXXXXSetPreallocation() or
2412: `MatSetUp()` before using this routine
2414: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2415: before using this routine.
2417: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2418: options cannot be mixed without intervening calls to the assembly
2419: routines.
2421: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2422: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2424: Developer Note:
2425: This is labeled with C so it does not automatically generate Fortran stubs and interfaces
2426: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2428: .seealso: [](chapter_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2429: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2430: @*/
2431: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2432: {
2433: PetscFunctionBeginHot;
2436: MatCheckPreallocated(mat, 1);
2437: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2440: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2441: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2442: if (PetscDefined(USE_DEBUG)) {
2443: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2444: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2445: }
2447: if (mat->assembled) {
2448: mat->was_assembled = PETSC_TRUE;
2449: mat->assembled = PETSC_FALSE;
2450: }
2451: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2452: PetscInt irbs, rbs;
2453: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2454: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2455: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2456: }
2457: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2458: PetscInt icbs, cbs;
2459: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2460: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2461: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2462: }
2463: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2464: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2465: else {
2466: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2467: const PetscInt *irowm, *icolm;
2469: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)(sizeof(buf) / sizeof(PetscInt))) {
2470: bufr = buf;
2471: bufc = buf + nrow;
2472: irowm = bufr;
2473: icolm = bufc;
2474: } else {
2475: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2476: irowm = bufr;
2477: icolm = bufc;
2478: }
2479: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2480: else irowm = irow;
2481: if (mat->cmap->mapping) {
2482: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2483: PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2484: } else icolm = irowm;
2485: } else icolm = icol;
2486: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2487: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2488: }
2489: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2490: PetscFunctionReturn(PETSC_SUCCESS);
2491: }
2493: /*@
2494: MatMultDiagonalBlock - Computes the matrix-vector product y = Dx, where D is defined by the inode or block structure of the diagonal
2496: Collective
2498: Input Parameters:
2499: + mat - the matrix
2500: - x - the vector to be multiplied
2502: Output Parameter:
2503: . y - the result
2505: Level: developer
2507: Note:
2508: The vectors `x` and `y` cannot be the same. I.e., one cannot
2509: call `MatMultDiagonalBlock`(A,y,y).
2511: .seealso: [](chapter_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2512: @*/
2513: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2514: {
2515: PetscFunctionBegin;
2521: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2522: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2523: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2524: MatCheckPreallocated(mat, 1);
2526: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2527: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2528: PetscFunctionReturn(PETSC_SUCCESS);
2529: }
2531: /*@
2532: MatMult - Computes the matrix-vector product, y = Ax.
2534: Neighbor-wise Collective
2536: Input Parameters:
2537: + mat - the matrix
2538: - x - the vector to be multiplied
2540: Output Parameter:
2541: . y - the result
2543: Level: beginner
2545: Note:
2546: The vectors `x` and `y` cannot be the same. I.e., one cannot
2547: call `MatMult`(A,y,y).
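Example:
A minimal sketch; `A` is assumed to be assembled, and `MatCreateVecs()` is used so that the layouts of `x` and `y` are compatible with `A`:
.vb
Vec x, y;

PetscCall(MatCreateVecs(A, &x, &y));
PetscCall(VecSet(x, 1.0));
PetscCall(MatMult(A, x, y)); /* y = A x */
PetscCall(VecDestroy(&x));
PetscCall(VecDestroy(&y));
.ve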
2549: .seealso: [](chapter_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2550: @*/
2551: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2552: {
2553: PetscFunctionBegin;
2557: VecCheckAssembled(x);
2559: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2560: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2561: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2562: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2563: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2564: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2565: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2566: PetscCall(VecSetErrorIfLocked(y, 3));
2567: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2568: MatCheckPreallocated(mat, 1);
2570: PetscCall(VecLockReadPush(x));
2571: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2572: PetscUseTypeMethod(mat, mult, x, y);
2573: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2574: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2575: PetscCall(VecLockReadPop(x));
2576: PetscFunctionReturn(PETSC_SUCCESS);
2577: }
2579: /*@
2580: MatMultTranspose - Computes matrix transpose times a vector y = A^T * x.
2582: Neighbor-wise Collective
2584: Input Parameters:
2585: + mat - the matrix
2586: - x - the vector to be multiplied
2588: Output Parameter:
2589: . y - the result
2591: Level: beginner
2593: Notes:
2594: The vectors `x` and `y` cannot be the same. I.e., one cannot
2595: call `MatMultTranspose`(A,y,y).
2597: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2598: use `MatMultHermitianTranspose()` for that.
2600: .seealso: [](chapter_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2601: @*/
2602: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2603: {
2604: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2606: PetscFunctionBegin;
2610: VecCheckAssembled(x);
2613: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2614: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2615: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2616: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2617: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2618: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2619: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2620: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2621: MatCheckPreallocated(mat, 1);
2623: if (!mat->ops->multtranspose) {
2624: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2625: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2626: } else op = mat->ops->multtranspose;
2627: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2628: PetscCall(VecLockReadPush(x));
2629: PetscCall((*op)(mat, x, y));
2630: PetscCall(VecLockReadPop(x));
2631: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2632: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2633: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2634: PetscFunctionReturn(PETSC_SUCCESS);
2635: }
2637: /*@
2638: MatMultHermitianTranspose - Computes matrix Hermitian transpose times a vector.
2640: Neighbor-wise Collective
2642: Input Parameters:
2643: + mat - the matrix
2644: - x - the vector to be multiplied
2646: Output Parameter:
2647: . y - the result
2649: Level: beginner
2651: Notes:
2652: The vectors `x` and `y` cannot be the same. I.e., one cannot
2653: call `MatMultHermitianTranspose`(A,y,y).
2655: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2657: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2659: .seealso: [](chapter_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2660: @*/
2661: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2662: {
2663: PetscFunctionBegin;
2669: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2670: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2671: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2672: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2673: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2674: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2675: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2676: MatCheckPreallocated(mat, 1);
2678: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2679: #if defined(PETSC_USE_COMPLEX)
2680: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2681: PetscCall(VecLockReadPush(x));
2682: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2683: else PetscUseTypeMethod(mat, mult, x, y);
2684: PetscCall(VecLockReadPop(x));
2685: } else {
2686: Vec w;
2687: PetscCall(VecDuplicate(x, &w));
2688: PetscCall(VecCopy(x, w));
2689: PetscCall(VecConjugate(w));
2690: PetscCall(MatMultTranspose(mat, w, y));
2691: PetscCall(VecDestroy(&w));
2692: PetscCall(VecConjugate(y));
2693: }
2694: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2695: #else
2696: PetscCall(MatMultTranspose(mat, x, y));
2697: #endif
2698: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2699: PetscFunctionReturn(PETSC_SUCCESS);
2700: }
2702: /*@
2703: MatMultAdd - Computes v3 = v2 + A * v1.
2705: Neighbor-wise Collective
2707: Input Parameters:
2708: + mat - the matrix
2709: . v1 - the vector to be multiplied by `mat`
2710: - v2 - the vector to be added to the result
2712: Output Parameter:
2713: . v3 - the result
2715: Level: beginner
2717: Note:
2718: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2719: call `MatMultAdd`(A,v1,v2,v1).
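Example:
A minimal sketch; `A` is assumed to be assembled and the vectors are created with compatible layouts via `MatCreateVecs()` and `VecDuplicate()`:
.vb
Vec v1, v2, v3;

PetscCall(MatCreateVecs(A, &v1, &v2));
PetscCall(VecDuplicate(v2, &v3));
PetscCall(VecSet(v1, 1.0));
PetscCall(VecSet(v2, 5.0));
PetscCall(MatMultAdd(A, v1, v2, v3)); /* v3 = v2 + A v1 */
PetscCall(VecDestroy(&v1));
PetscCall(VecDestroy(&v2));
PetscCall(VecDestroy(&v3));
.ve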
2721: .seealso: [](chapter_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2722: @*/
2723: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2724: {
2725: PetscFunctionBegin;
2732: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2733: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2734: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2735: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2736: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2737: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2738: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2739: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2740: MatCheckPreallocated(mat, 1);
2742: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2743: PetscCall(VecLockReadPush(v1));
2744: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2745: PetscCall(VecLockReadPop(v1));
2746: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2747: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2748: PetscFunctionReturn(PETSC_SUCCESS);
2749: }
2751: /*@
2752: MatMultTransposeAdd - Computes v3 = v2 + A^T * v1.
2754: Neighbor-wise Collective
2756: Input Parameters:
2757: + mat - the matrix
2758: . v1 - the vector to be multiplied by the transpose of the matrix
2759: - v2 - the vector to be added to the result
2761: Output Parameter:
2762: . v3 - the result
2764: Level: beginner
2766: Note:
2767: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2768: call `MatMultTransposeAdd`(A,v1,v2,v1).
2770: .seealso: [](chapter_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2771: @*/
2772: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2773: {
2774: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2776: PetscFunctionBegin;
2783: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2784: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2785: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2786: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2787: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2788: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2789: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2790: MatCheckPreallocated(mat, 1);
2792: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2793: PetscCall(VecLockReadPush(v1));
2794: PetscCall((*op)(mat, v1, v2, v3));
2795: PetscCall(VecLockReadPop(v1));
2796: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2797: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2798: PetscFunctionReturn(PETSC_SUCCESS);
2799: }
2801: /*@
2802: MatMultHermitianTransposeAdd - Computes v3 = v2 + A^H * v1.
2804: Neighbor-wise Collective
2806: Input Parameters:
2807: + mat - the matrix
2808: . v1 - the vector to be multiplied by the Hermitian transpose
2809: - v2 - the vector to be added to the result
2811: Output Parameter:
2812: . v3 - the result
2814: Level: beginner
2816: Note:
2817: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2818: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2820: .seealso: [](chapter_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2821: @*/
2822: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2823: {
2824: PetscFunctionBegin;
2831: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2832: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2833: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2834: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2835: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2836: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2837: MatCheckPreallocated(mat, 1);
2839: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2840: PetscCall(VecLockReadPush(v1));
2841: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2842: else {
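/* No type-specific implementation: use A^H v1 = conj(A^T conj(v1)); conjugate a copy of v1, apply MatMultTranspose(), conjugate the result, then add v2 */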
2843: Vec w, z;
2844: PetscCall(VecDuplicate(v1, &w));
2845: PetscCall(VecCopy(v1, w));
2846: PetscCall(VecConjugate(w));
2847: PetscCall(VecDuplicate(v3, &z));
2848: PetscCall(MatMultTranspose(mat, w, z));
2849: PetscCall(VecDestroy(&w));
2850: PetscCall(VecConjugate(z));
2851: if (v2 != v3) {
2852: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2853: } else {
2854: PetscCall(VecAXPY(v3, 1.0, z));
2855: }
2856: PetscCall(VecDestroy(&z));
2857: }
2858: PetscCall(VecLockReadPop(v1));
2859: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2860: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2861: PetscFunctionReturn(PETSC_SUCCESS);
2862: }
2864: /*@C
2865: MatGetFactorType - gets the type of factorization that a matrix represents
2867: Not Collective
2869: Input Parameter:
2870: . mat - the matrix
2872: Output Parameter:
2873: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2875: Level: intermediate
2877: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2878: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2879: @*/
2880: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
2881: {
2882: PetscFunctionBegin;
2886: *t = mat->factortype;
2887: PetscFunctionReturn(PETSC_SUCCESS);
2888: }
2890: /*@C
2891: MatSetFactorType - sets the type of factorization that a matrix represents
2893: Logically Collective
2895: Input Parameters:
2896: + mat - the matrix
2897: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2899: Level: intermediate
2901: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2902: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2903: @*/
2904: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
2905: {
2906: PetscFunctionBegin;
2909: mat->factortype = t;
2910: PetscFunctionReturn(PETSC_SUCCESS);
2911: }
2913: /*@C
2914: MatGetInfo - Returns information about matrix storage (number of
2915: nonzeros, memory, etc.).
2917: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
2919: Input Parameters:
2920: + mat - the matrix
2921: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
2923: Output Parameter:
2924: . info - matrix information context
2926: Notes:
2927: The `MatInfo` context contains a variety of matrix data, including
2928: number of nonzeros allocated and used, number of mallocs during
2929: matrix assembly, etc. Additional information for factored matrices
2930: is provided (such as the fill ratio, number of mallocs during
2931: factorization, etc.). Much of this info is printed to `PETSC_STDOUT`
2932: when using the runtime options
2933: $ -info -mat_view ::ascii_info
2935: Example:
2936: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
2937: data within the `MatInfo` context. For example,
2938: .vb
2939: MatInfo info;
2940: Mat A;
2941: double mal, nz_a, nz_u;
2943: MatGetInfo(A,MAT_LOCAL,&info);
2944: mal = info.mallocs;
2945: nz_a = info.nz_allocated;
2946: .ve
2948: Fortran users should declare info as a double precision
2949: array of dimension `MAT_INFO_SIZE`, and then extract the parameters
2950: of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h
2951: for a complete list of parameter names.
2952: .vb
2953: double precision info(MAT_INFO_SIZE)
2954: double precision mal, nz_a
2955: Mat A
2956: integer ierr
2958: call MatGetInfo(A,MAT_LOCAL,info,ierr)
2959: mal = info(MAT_INFO_MALLOCS)
2960: nz_a = info(MAT_INFO_NZ_ALLOCATED)
2961: .ve
2963: Level: intermediate
2965: Developer Note:
2966: The Fortran interface is not autogenerated as the
2967: interface definition cannot be generated correctly [due to `MatInfo` argument]
2969: .seealso: [](chapter_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
2970: @*/
2971: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
2972: {
2973: PetscFunctionBegin;
2977: MatCheckPreallocated(mat, 1);
2978: PetscUseTypeMethod(mat, getinfo, flag, info);
2979: PetscFunctionReturn(PETSC_SUCCESS);
2980: }
2982: /*
2983: This is used by external packages where it is not easy to get the info from the actual
2984: matrix factorization.
2985: */
2986: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
2987: {
2988: PetscFunctionBegin;
2989: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
2990: PetscFunctionReturn(PETSC_SUCCESS);
2991: }
2993: /*@C
2994: MatLUFactor - Performs in-place LU factorization of a matrix.
2996: Collective
2998: Input Parameters:
2999: + mat - the matrix
3000: . row - row permutation
3001: . col - column permutation
3002: - info - options for factorization, includes
3003: .vb
3004: fill - expected fill as ratio of original fill.
3005: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3006: Run with the option -info to determine an optimal value to use
3007: .ve
3008: Level: developer
3010: Notes:
3011: Most users should employ the `KSP` interface for linear solvers
3012: instead of working directly with matrix algebra routines such as this.
3013: See, e.g., `KSPCreate()`.
3015: This changes the state of the matrix to a factored matrix; it cannot be used
3016: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3018: This is really in-place only for dense matrices; the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3019: when not using `KSP`, as sketched below.
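   Example:
   A minimal sketch of that preferred out-of-place sequence (illustrative only; the solver package `MATSOLVERPETSC`, the ordering `MATORDERINGND`, and the NULL arguments requesting default factorization options are just one possible choice; error checking is omitted) is
.vb
   Mat F;
   IS  rowperm, colperm;
   MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm);
   MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F);
   MatLUFactorSymbolic(F, A, rowperm, colperm, NULL);
   MatLUFactorNumeric(F, A, NULL);
   MatSolve(F, b, x);
.ve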
3021: Developer Note:
3022: The Fortran interface is not autogenerated as the
3023: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3025: .seealso: [](chapter_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3026: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3027: @*/
3028: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3029: {
3030: MatFactorInfo tinfo;
3032: PetscFunctionBegin;
3038: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3039: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3040: MatCheckPreallocated(mat, 1);
3041: if (!info) {
3042: PetscCall(MatFactorInfoInitialize(&tinfo));
3043: info = &tinfo;
3044: }
3046: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3047: PetscUseTypeMethod(mat, lufactor, row, col, info);
3048: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3049: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3050: PetscFunctionReturn(PETSC_SUCCESS);
3051: }
3053: /*@C
3054: MatILUFactor - Performs in-place ILU factorization of a matrix.
3056: Collective
3058: Input Parameters:
3059: + mat - the matrix
3060: . row - row permutation
3061: . col - column permutation
3062: - info - structure containing
3063: .vb
3064: levels - number of levels of fill.
3065: expected fill - as ratio of original fill.
3066: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3067: missing diagonal entries)
3068: .ve
3070: Level: developer
3072: Notes:
3073: Most users should employ the `KSP` interface for linear solvers
3074: instead of working directly with matrix algebra routines such as this.
3075: See, e.g., `KSPCreate()`.
3077: This is probably really in-place only when the level of fill is zero; otherwise it allocates
3078: new space to store the factored matrix and frees the previous memory. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatLUFactorNumeric()`
3079: when not using `KSP`.
3081: Developer Note:
3082: The Fortran interface is not autogenerated as the
3083: interface definition cannot be generated correctly [due to MatFactorInfo]
3085: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3086: @*/
3087: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3088: {
3089: PetscFunctionBegin;
3095: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3096: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3097: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3098: MatCheckPreallocated(mat, 1);
3100: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3101: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3102: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3103: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3104: PetscFunctionReturn(PETSC_SUCCESS);
3105: }
3107: /*@C
3108: MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
3109: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3111: Collective
3113: Input Parameters:
3114: + fact - the factor matrix obtained with `MatGetFactor()`
3115: . mat - the matrix
3116: . row - the row permutation
3117: . col - the column permutation
3118: - info - options for factorization, includes
3119: .vb
3120: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3121: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3122: .ve
3124: Level: developer
3126: Notes:
3127: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3129: Most users should employ the simplified `KSP` interface for linear solvers
3130: instead of working directly with matrix algebra routines such as this.
3131: See, e.g., `KSPCreate()`.
3133: Developer Note:
3134: The Fortran interface is not autogenerated as the
3135: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3137: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3138: @*/
3139: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3140: {
3141: MatFactorInfo tinfo;
3143: PetscFunctionBegin;
3150: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3151: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3152: if (!(fact)->ops->lufactorsymbolic) {
3153: MatSolverType stype;
3154: PetscCall(MatFactorGetSolverType(fact, &stype));
3155: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s symbolic LU using solver package %s", ((PetscObject)mat)->type_name, stype);
3156: }
3157: MatCheckPreallocated(mat, 2);
3158: if (!info) {
3159: PetscCall(MatFactorInfoInitialize(&tinfo));
3160: info = &tinfo;
3161: }
3163: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3164: PetscCall((fact->ops->lufactorsymbolic)(fact, mat, row, col, info));
3165: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3166: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3167: PetscFunctionReturn(PETSC_SUCCESS);
3168: }
3170: /*@C
3171: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3172: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3174: Collective
3176: Input Parameters:
3177: + fact - the factor matrix obtained with `MatGetFactor()`
3178: . mat - the matrix
3179: - info - options for factorization
3181: Level: developer
3183: Notes:
3184: See `MatLUFactor()` for in-place factorization. See
3185: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3187: Most users should employ the `KSP` interface for linear solvers
3188: instead of working directly with matrix algebra routines such as this.
3189: See, e.g., `KSPCreate()`.
3191: Developer Note:
3192: The Fortran interface is not autogenerated as the
3193: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3195: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3196: @*/
3197: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3198: {
3199: MatFactorInfo tinfo;
3201: PetscFunctionBegin;
3206: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3207: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3208: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3210: PetscCheck((fact)->ops->lufactornumeric, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s numeric LU", ((PetscObject)mat)->type_name);
3211: MatCheckPreallocated(mat, 2);
3212: if (!info) {
3213: PetscCall(MatFactorInfoInitialize(&tinfo));
3214: info = &tinfo;
3215: }
3217: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3218: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3219: PetscCall((fact->ops->lufactornumeric)(fact, mat, info));
3220: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3221: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3222: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3223: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3224: PetscFunctionReturn(PETSC_SUCCESS);
3225: }
3227: /*@C
3228: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3229: symmetric matrix.
3231: Collective
3233: Input Parameters:
3234: + mat - the matrix
3235: . perm - row and column permutations
3236: - f - expected fill as ratio of original fill
3238: Level: developer
3240: Notes:
3241: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3242: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3244: Most users should employ the `KSP` interface for linear solvers
3245: instead of working directly with matrix algebra routines such as this.
3246: See, e.g., `KSPCreate()`.
3248: Developer Note:
3249: The Fortran interface is not autogenerated as the
3250: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3252: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`,
3253: `MatGetOrdering()`
3254: @*/
3255: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3256: {
3257: MatFactorInfo tinfo;
3259: PetscFunctionBegin;
3264: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3265: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3266: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3267: MatCheckPreallocated(mat, 1);
3268: if (!info) {
3269: PetscCall(MatFactorInfoInitialize(&tinfo));
3270: info = &tinfo;
3271: }
3273: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3274: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3275: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3276: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3277: PetscFunctionReturn(PETSC_SUCCESS);
3278: }
3280: /*@C
3281: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3282: of a symmetric matrix.
3284: Collective
3286: Input Parameters:
3287: + fact - the factor matrix obtained with `MatGetFactor()`
3288: . mat - the matrix
3289: . perm - row and column permutations
3290: - info - options for factorization, includes
3291: .vb
3292: fill - expected fill as ratio of original fill.
3293: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3294: Run with the option -info to determine an optimal value to use
3295: .ve
3297: Level: developer
3299: Notes:
3300: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3301: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3303: Most users should employ the `KSP` interface for linear solvers
3304: instead of working directly with matrix algebra routines such as this.
3305: See, e.g., `KSPCreate()`.
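   Example:
   A sketch of the typical call sequence (illustrative only; the solver package, the natural ordering, and the NULL default options shown here are assumptions, and error checking is omitted) is
.vb
   Mat F;
   IS  perm, iperm;
   MatGetOrdering(A, MATORDERINGNATURAL, &perm, &iperm);
   MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F);
   MatCholeskyFactorSymbolic(F, A, perm, NULL);
   MatCholeskyFactorNumeric(F, A, NULL);
   MatSolve(F, b, x);
.ve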
3307: Developer Note:
3308: The Fortran interface is not autogenerated as the
3309: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3311: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`,
3312: `MatGetOrdering()`
3313: @*/
3314: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3315: {
3316: MatFactorInfo tinfo;
3318: PetscFunctionBegin;
3324: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3325: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3326: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3327: if (!(fact)->ops->choleskyfactorsymbolic) {
3328: MatSolverType stype;
3329: PetscCall(MatFactorGetSolverType(fact, &stype));
3330: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s symbolic factor Cholesky using solver package %s", ((PetscObject)mat)->type_name, stype);
3331: }
3332: MatCheckPreallocated(mat, 2);
3333: if (!info) {
3334: PetscCall(MatFactorInfoInitialize(&tinfo));
3335: info = &tinfo;
3336: }
3338: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3339: PetscCall((fact->ops->choleskyfactorsymbolic)(fact, mat, perm, info));
3340: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3341: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3342: PetscFunctionReturn(PETSC_SUCCESS);
3343: }
3345: /*@C
3346: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3347: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3348: `MatCholeskyFactorSymbolic()`.
3350: Collective
3352: Input Parameters:
3353: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3354: . mat - the initial matrix that is to be factored
3355: - info - options for factorization
3357: Level: developer
3359: Note:
3360: Most users should employ the `KSP` interface for linear solvers
3361: instead of working directly with matrix algebra routines such as this.
3362: See, e.g., `KSPCreate()`.
3364: Developer Note:
3365: The Fortran interface is not autogenerated as the
3366: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3368: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3369: @*/
3370: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3371: {
3372: MatFactorInfo tinfo;
3374: PetscFunctionBegin;
3379: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3380: PetscCheck((fact)->ops->choleskyfactornumeric, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s numeric factor Cholesky", ((PetscObject)mat)->type_name);
3381: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3382: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3383: MatCheckPreallocated(mat, 2);
3384: if (!info) {
3385: PetscCall(MatFactorInfoInitialize(&tinfo));
3386: info = &tinfo;
3387: }
3389: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3390: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3391: PetscCall((fact->ops->choleskyfactornumeric)(fact, mat, info));
3392: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3393: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3394: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3395: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3396: PetscFunctionReturn(PETSC_SUCCESS);
3397: }
3399: /*@
3400: MatQRFactor - Performs in-place QR factorization of a matrix.
3402: Collective
3404: Input Parameters:
3405: + mat - the matrix
3406: . col - column permutation
3407: - info - options for factorization, includes
3408: .vb
3409: fill - expected fill as ratio of original fill.
3410: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3411: Run with the option -info to determine an optimal value to use
3412: .ve
3414: Level: developer
3416: Notes:
3417: Most users should employ the `KSP` interface for linear solvers
3418: instead of working directly with matrix algebra routines such as this.
3419: See, e.g., `KSPCreate()`.
3421: This changes the state of the matrix to a factored matrix; it cannot be used
3422: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3424: Developer Note:
3425: The Fortran interface is not autogenerated as the
3426: interface definition cannot be generated correctly [due to MatFactorInfo]
3428: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3429: `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3430: @*/
3431: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3432: {
3433: PetscFunctionBegin;
3438: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3439: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3440: MatCheckPreallocated(mat, 1);
3441: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3442: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3443: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3444: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3445: PetscFunctionReturn(PETSC_SUCCESS);
3446: }
3448: /*@
3449: MatQRFactorSymbolic - Performs symbolic QR factorization of a matrix.
3450: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3452: Collective
3454: Input Parameters:
3455: + fact - the factor matrix obtained with `MatGetFactor()`
3456: . mat - the matrix
3457: . col - column permutation
3458: - info - options for factorization, includes
3459: .vb
3460: fill - expected fill as ratio of original fill.
3461: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3462: Run with the option -info to determine an optimal value to use
3463: .ve
3465: Level: developer
3467: Note:
3468: Most users should employ the `KSP` interface for linear solvers
3469: instead of working directly with matrix algebra routines such as this.
3470: See, e.g., `KSPCreate()`.
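   Example:
   A sketch of the typical call sequence (illustrative only; it assumes a matrix type, such as `MATSEQDENSE`, whose default solver provides QR, and passes NULL to take the default column permutation and factorization options) is
.vb
   Mat F;
   MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_QR, &F);
   MatQRFactorSymbolic(F, A, NULL, NULL);
   MatQRFactorNumeric(F, A, NULL);
   MatSolve(F, b, x);
.ve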
3472: Developer Note:
3473: The Fortran interface is not autogenerated as the
3474: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3476: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3477: @*/
3478: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3479: {
3480: MatFactorInfo tinfo;
3482: PetscFunctionBegin;
3488: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3489: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3490: MatCheckPreallocated(mat, 2);
3491: if (!info) {
3492: PetscCall(MatFactorInfoInitialize(&tinfo));
3493: info = &tinfo;
3494: }
3496: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3497: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3498: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3499: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3500: PetscFunctionReturn(PETSC_SUCCESS);
3501: }
3503: /*@
3504: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3505: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3507: Collective
3509: Input Parameters:
3510: + fact - the factor matrix obtained with `MatGetFactor()`
3511: . mat - the matrix
3512: - info - options for factorization
3514: Level: developer
3516: Notes:
3517: See `MatQRFactor()` for in-place factorization.
3519: Most users should employ the `KSP` interface for linear solvers
3520: instead of working directly with matrix algebra routines such as this.
3521: See, e.g., `KSPCreate()`.
3523: Developer Note:
3524: The Fortran interface is not autogenerated as the
3525: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3527: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3528: @*/
3529: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3530: {
3531: MatFactorInfo tinfo;
3533: PetscFunctionBegin;
3538: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3539: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3540: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3542: MatCheckPreallocated(mat, 2);
3543: if (!info) {
3544: PetscCall(MatFactorInfoInitialize(&tinfo));
3545: info = &tinfo;
3546: }
3548: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3549: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3550: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3551: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3552: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3553: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3554: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3555: PetscFunctionReturn(PETSC_SUCCESS);
3556: }
3558: /*@
3559: MatSolve - Solves A x = b, given a factored matrix.
3561: Neighbor-wise Collective
3563: Input Parameters:
3564: + mat - the factored matrix
3565: - b - the right-hand-side vector
3567: Output Parameter:
3568: . x - the result vector
3570: Level: developer
3572: Notes:
3573: The vectors `b` and `x` cannot be the same. I.e., one cannot
3574: call `MatSolve`(A,x,x).
3576: Most users should employ the `KSP` interface for linear solvers
3577: instead of working directly with matrix algebra routines such as this.
3578: See, e.g., `KSPCreate()`.
3580: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3581: @*/
3582: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3583: {
3584: PetscFunctionBegin;
3589: PetscCheckSameComm(mat, 1, b, 2);
3590: PetscCheckSameComm(mat, 1, x, 3);
3591: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3592: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3593: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3594: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3595: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3596: MatCheckPreallocated(mat, 1);
3598: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3599: if (mat->factorerrortype) {
3600: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3601: PetscCall(VecSetInf(x));
3602: } else PetscUseTypeMethod(mat, solve, b, x);
3603: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3604: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3605: PetscFunctionReturn(PETSC_SUCCESS);
3606: }
3608: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3609: {
3610: Vec b, x;
3611: PetscInt N, i;
3612: PetscErrorCode (*f)(Mat, Vec, Vec);
3613: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3615: PetscFunctionBegin;
3616: if (A->factorerrortype) {
3617: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3618: PetscCall(MatSetInf(X));
3619: PetscFunctionReturn(PETSC_SUCCESS);
3620: }
3621: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3622: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3623: PetscCall(MatBoundToCPU(A, &Abound));
3624: if (!Abound) {
3625: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3626: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3627: }
3628: #if defined(PETSC_HAVE_CUDA)
3629: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3630: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3631: #elif defined(PETSC_HAVE_HIP)
3632: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3633: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3634: #endif
3635: PetscCall(MatGetSize(B, NULL, &N));
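/* Solve column by column: treat column i of B and of X as vectors and apply the single-vector (transpose) solve to each */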
3636: for (i = 0; i < N; i++) {
3637: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3638: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3639: PetscCall((*f)(A, b, x));
3640: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3641: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3642: }
3643: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3644: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3645: PetscFunctionReturn(PETSC_SUCCESS);
3646: }
3648: /*@
3649: MatMatSolve - Solves A X = B, given a factored matrix.
3651: Neighbor-wise Collective
3653: Input Parameters:
3654: + A - the factored matrix
3655: - B - the right-hand-side matrix of type `MATDENSE` (or a sparse `MATAIJ` when using MUMPS)
3657: Output Parameter:
3658: . X - the result matrix (dense matrix)
3660: Level: developer
3662: Note:
3663: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3664: otherwise, `B` and `X` cannot be the same.
3666: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3667: @*/
3668: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3669: {
3670: PetscFunctionBegin;
3675: PetscCheckSameComm(A, 1, B, 2);
3676: PetscCheckSameComm(A, 1, X, 3);
3677: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3678: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3679: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3680: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3681: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3682: MatCheckPreallocated(A, 1);
3684: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3685: if (!A->ops->matsolve) {
3686: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3687: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3688: } else PetscUseTypeMethod(A, matsolve, B, X);
3689: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3690: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3691: PetscFunctionReturn(PETSC_SUCCESS);
3692: }
3694: /*@
3695: MatMatSolveTranspose - Solves A^T X = B, given a factored matrix.
3697: Neighbor-wise Collective
3699: Input Parameters:
3700: + A - the factored matrix
3701: - B - the right-hand-side matrix (`MATDENSE` matrix)
3703: Output Parameter:
3704: . X - the result matrix (dense matrix)
3706: Level: developer
3708: Note:
3709: The matrices `B` and `X` cannot be the same. I.e., one cannot
3710: call `MatMatSolveTranspose`(A,X,X).
3712: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3713: @*/
3714: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3715: {
3716: PetscFunctionBegin;
3721: PetscCheckSameComm(A, 1, B, 2);
3722: PetscCheckSameComm(A, 1, X, 3);
3723: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3724: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3725: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3726: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3727: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix");
3728: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3729: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3730: MatCheckPreallocated(A, 1);
3732: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3733: if (!A->ops->matsolvetranspose) {
3734: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3735: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3736: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3737: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3738: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3739: PetscFunctionReturn(PETSC_SUCCESS);
3740: }
3742: /*@
3743: MatMatTransposeSolve - Solves A X = B^T, given a factored matrix.
3745: Neighbor-wise Collective
3747: Input Parameters:
3748: + A - the factored matrix
3749: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3751: Output Parameter:
3752: . X - the result matrix (dense matrix)
3754: Level: developer
3756: Note:
3757: MUMPS only supports the centralized sparse compressed column format on the host processor for the right-hand-side matrix. The user must create B^T in sparse compressed row
3758: format on the host processor and call `MatMatTransposeSolve()` to implement MUMPS' `MatMatSolve()`.
3760: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3761: @*/
3762: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3763: {
3764: PetscFunctionBegin;
3769: PetscCheckSameComm(A, 1, Bt, 2);
3770: PetscCheckSameComm(A, 1, X, 3);
3772: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3773: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3774: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3775: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix has rows");
3776: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3777: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3778: MatCheckPreallocated(A, 1);
3780: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3781: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3782: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3783: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3784: PetscFunctionReturn(PETSC_SUCCESS);
3785: }
3787: /*@
3788: MatForwardSolve - Solves L x = b, given a factored matrix, A = LU, or
3789: U^T*D^(1/2) x = b, given a factored symmetric matrix, A = U^T*D*U.
3791: Neighbor-wise Collective
3793: Input Parameters:
3794: + mat - the factored matrix
3795: - b - the right-hand-side vector
3797: Output Parameter:
3798: . x - the result vector
3800: Level: developer
3802: Notes:
3803: `MatSolve()` should be used for most applications, as it performs
3804: a forward solve followed by a backward solve.
3806: The vectors `b` and `x` cannot be the same, i.e., one cannot
3807: call `MatForwardSolve`(A,x,x).
3809: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3810: the diagonal blocks are not implemented as D = D^(1/2) * D^(1/2) yet.
3811: `MatForwardSolve()` solves U^T*D y = b, and
3812: `MatBackwardSolve()` solves U x = y.
3813: Thus they do not provide a symmetric preconditioner.
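   Example:
   An illustrative two-stage solve with a factored matrix `F` (a sketch; `y` is a hypothetical work vector and error checking is omitted) is
.vb
   MatForwardSolve(F, b, y);  // forward (lower-triangular) stage
   MatBackwardSolve(F, y, x); // backward (upper-triangular) stage; together equivalent to MatSolve(F, b, x)
.ve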
3815: .seealso: [](chapter_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`, `MatBackwardSolve()`
3816: @*/
3817: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3818: {
3819: PetscFunctionBegin;
3824: PetscCheckSameComm(mat, 1, b, 2);
3825: PetscCheckSameComm(mat, 1, x, 3);
3826: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3827: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3828: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3829: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3830: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3831: MatCheckPreallocated(mat, 1);
3833: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3834: PetscUseTypeMethod(mat, forwardsolve, b, x);
3835: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3836: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3837: PetscFunctionReturn(PETSC_SUCCESS);
3838: }
3840: /*@
3841: MatBackwardSolve - Solves U x = b, given a factored matrix, A = LU, or
3842: D^(1/2) U x = b, given a factored symmetric matrix, A = U^T*D*U.
3844: Neighbor-wise Collective
3846: Input Parameters:
3847: + mat - the factored matrix
3848: - b - the right-hand-side vector
3850: Output Parameter:
3851: . x - the result vector
3853: Level: developer
3855: Notes:
3856: `MatSolve()` should be used for most applications, as it performs
3857: a forward solve followed by a backward solve.
3859: The vectors `b` and `x` cannot be the same. I.e., one cannot
3860: call `MatBackwardSolve`(A,x,x).
3862: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3863: the diagonal blocks are not implemented as D = D^(1/2) * D^(1/2) yet.
3864: `MatForwardSolve()` solves U^T*D y = b, and
3865: `MatBackwardSolve()` solves U x = y.
3866: Thus they do not provide a symmetric preconditioner.
3868: .seealso: [](chapter_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`, `MatForwardSolve()`
3869: @*/
3870: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3871: {
3872: PetscFunctionBegin;
3877: PetscCheckSameComm(mat, 1, b, 2);
3878: PetscCheckSameComm(mat, 1, x, 3);
3879: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3880: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3881: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3882: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3883: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3884: MatCheckPreallocated(mat, 1);
3886: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3887: PetscUseTypeMethod(mat, backwardsolve, b, x);
3888: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3889: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3890: PetscFunctionReturn(PETSC_SUCCESS);
3891: }
3893: /*@
3894: MatSolveAdd - Computes x = y + inv(A)*b, given a factored matrix.
3896: Neighbor-wise Collective
3898: Input Parameters:
3899: + mat - the factored matrix
3900: . b - the right-hand-side vector
3901: - y - the vector to be added to
3903: Output Parameter:
3904: . x - the result vector
3906: Level: developer
3908: Note:
3909: The vectors `b` and `x` cannot be the same. I.e., one cannot
3910: call `MatSolveAdd`(A,x,y,x).
3912: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3913: @*/
3914: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
3915: {
3916: PetscScalar one = 1.0;
3917: Vec tmp;
3919: PetscFunctionBegin;
3925: PetscCheckSameComm(mat, 1, b, 2);
3926: PetscCheckSameComm(mat, 1, y, 3);
3927: PetscCheckSameComm(mat, 1, x, 4);
3928: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3929: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3930: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3931: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
3932: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3933: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
3934: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3935: MatCheckPreallocated(mat, 1);
3937: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
3938: if (mat->factorerrortype) {
3939: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3940: PetscCall(VecSetInf(x));
3941: } else if (mat->ops->solveadd) {
3942: PetscUseTypeMethod(mat, solveadd, b, y, x);
3943: } else {
3944: /* do the solve then the add manually */
3945: if (x != y) {
3946: PetscCall(MatSolve(mat, b, x));
3947: PetscCall(VecAXPY(x, one, y));
3948: } else {
3949: PetscCall(VecDuplicate(x, &tmp));
3950: PetscCall(VecCopy(x, tmp));
3951: PetscCall(MatSolve(mat, b, x));
3952: PetscCall(VecAXPY(x, one, tmp));
3953: PetscCall(VecDestroy(&tmp));
3954: }
3955: }
3956: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
3957: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3958: PetscFunctionReturn(PETSC_SUCCESS);
3959: }
3961: /*@
3962: MatSolveTranspose - Solves A' x = b, given a factored matrix.
3964: Neighbor-wise Collective
3966: Input Parameters:
3967: + mat - the factored matrix
3968: - b - the right-hand-side vector
3970: Output Parameter:
3971: . x - the result vector
3973: Level: developer
3975: Notes:
3976: The vectors `b` and `x` cannot be the same. I.e., one cannot
3977: call `MatSolveTranspose`(A,x,x).
3979: Most users should employ the `KSP` interface for linear solvers
3980: instead of working directly with matrix algebra routines such as this.
3981: See, e.g., `KSPCreate()`.
3983: .seealso: [](chapter_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
3984: @*/
3985: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
3986: {
3987: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
3989: PetscFunctionBegin;
3994: PetscCheckSameComm(mat, 1, b, 2);
3995: PetscCheckSameComm(mat, 1, x, 3);
3996: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3997: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
3998: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
3999: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4000: MatCheckPreallocated(mat, 1);
4001: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4002: if (mat->factorerrortype) {
4003: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4004: PetscCall(VecSetInf(x));
4005: } else {
4006: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4007: PetscCall((*f)(mat, b, x));
4008: }
4009: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4010: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4011: PetscFunctionReturn(PETSC_SUCCESS);
4012: }
4014: /*@
4015: MatSolveTransposeAdd - Computes x = y + inv(Transpose(A)) b, given a
4016: factored matrix.
4018: Neighbor-wise Collective
4020: Input Parameters:
4021: + mat - the factored matrix
4022: . b - the right-hand-side vector
4023: - y - the vector to be added to
4025: Output Parameter:
4026: . x - the result vector
4028: Level: developer
4030: Note:
4031: The vectors `b` and `x` cannot be the same. I.e., one cannot
4032: call `MatSolveTransposeAdd`(A,x,y,x).
4034: .seealso: [](chapter_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4035: @*/
4036: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4037: {
4038: PetscScalar one = 1.0;
4039: Vec tmp;
4040: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4042: PetscFunctionBegin;
4048: PetscCheckSameComm(mat, 1, b, 2);
4049: PetscCheckSameComm(mat, 1, y, 3);
4050: PetscCheckSameComm(mat, 1, x, 4);
4051: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4052: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4053: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4054: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4055: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4056: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4057: MatCheckPreallocated(mat, 1);
4059: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4060: if (mat->factorerrortype) {
4061: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4062: PetscCall(VecSetInf(x));
4063: } else if (f) {
4064: PetscCall((*f)(mat, b, y, x));
4065: } else {
4066: /* do the solve then the add manually */
4067: if (x != y) {
4068: PetscCall(MatSolveTranspose(mat, b, x));
4069: PetscCall(VecAXPY(x, one, y));
4070: } else {
4071: PetscCall(VecDuplicate(x, &tmp));
4072: PetscCall(VecCopy(x, tmp));
4073: PetscCall(MatSolveTranspose(mat, b, x));
4074: PetscCall(VecAXPY(x, one, tmp));
4075: PetscCall(VecDestroy(&tmp));
4076: }
4077: }
4078: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4079: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4080: PetscFunctionReturn(PETSC_SUCCESS);
4081: }
4083: /*@
4084: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4086: Neighbor-wise Collective
4088: Input Parameters:
4089: + mat - the matrix
4090: . b - the right hand side
4091: . omega - the relaxation factor
4092: . flag - flag indicating the type of SOR (see below)
4093: . shift - diagonal shift
4094: . its - the number of iterations
4095: - lits - the number of local iterations
4097: Output Parameter:
4098: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4100: SOR Flags:
4101: + `SOR_FORWARD_SWEEP` - forward SOR
4102: . `SOR_BACKWARD_SWEEP` - backward SOR
4103: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4104: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4105: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4106: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4107: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4108: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4109: upper/lower triangular part of matrix to
4110: vector (with omega)
4111: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4113: Level: developer
4115: Notes:
4116: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4117: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4118: on each processor.
4120: Application programmers will not generally use `MatSOR()` directly,
4121: but instead will employ the `KSP`/`PC` interface.
4123: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing, otherwise it does a pointwise smoothing
4125: Most users should employ the `KSP` interface for linear solvers
4126: instead of working directly with matrix algebra routines such as this.
4127: See, e.g., `KSPCreate()`.
4129: Vectors `x` and `b` CANNOT be the same
4131: The flags are implemented as bitwise inclusive or operations.
4132: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4133: to specify a zero initial guess for SSOR.
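   Example:
   For instance, an illustrative call that performs two SSOR sweeps with relaxation factor 1.5, no diagonal shift, and a zero initial guess might look like
.vb
   MatSOR(A, b, 1.5, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), 0.0, 2, 2, x);
.ve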
4135: Developer Note:
4136: We should add block SOR support for `MATAIJ` matrices with block size greater than one and no Inodes
4138: .seealso: [](chapter_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4139: @*/
4140: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4141: {
4142: PetscFunctionBegin;
4147: PetscCheckSameComm(mat, 1, b, 2);
4148: PetscCheckSameComm(mat, 1, x, 8);
4149: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4150: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4151: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4152: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4153: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4154: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4155: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4156: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4158: MatCheckPreallocated(mat, 1);
4159: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4160: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4161: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4162: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4163: PetscFunctionReturn(PETSC_SUCCESS);
4164: }
4166: /*
4167: Default matrix copy routine.
4168: */
4169: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4170: {
4171: PetscInt i, rstart = 0, rend = 0, nz;
4172: const PetscInt *cwork;
4173: const PetscScalar *vwork;
4175: PetscFunctionBegin;
4176: if (B->assembled) PetscCall(MatZeroEntries(B));
4177: if (str == SAME_NONZERO_PATTERN) {
4178: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4179: for (i = rstart; i < rend; i++) {
4180: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4181: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4182: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4183: }
4184: } else {
4185: PetscCall(MatAYPX(B, 0.0, A, str));
4186: }
4187: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4188: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4189: PetscFunctionReturn(PETSC_SUCCESS);
4190: }
4192: /*@
4193: MatCopy - Copies a matrix to another matrix.
4195: Collective
4197: Input Parameters:
4198: + A - the matrix
4199: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4201: Output Parameter:
4202: . B - where the copy is put
4204: Level: intermediate
4206: Notes:
4207: If you use `SAME_NONZERO_PATTERN` then the two matrices must have the same nonzero pattern or the routine will crash.
4209: `MatCopy()` copies the matrix entries of a matrix to another existing
4210: matrix (after first zeroing the second matrix). A related routine is
4211: `MatConvert()`, which first creates a new matrix and then copies the data.
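
  Example Usage:
  A short sketch; duplicating A first guarantees that B has the same nonzero pattern, so `SAME_NONZERO_PATTERN` is safe here:
.vb
  Mat B;
  MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B);
  MatCopy(A, B, SAME_NONZERO_PATTERN);
.ve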
4213: .seealso: [](chapter_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4214: @*/
4215: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4216: {
4217: PetscInt i;
4219: PetscFunctionBegin;
4224: PetscCheckSameComm(A, 1, B, 2);
4225: MatCheckPreallocated(B, 2);
4226: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4227: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4228: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4229: A->cmap->N, B->cmap->N);
4230: MatCheckPreallocated(A, 1);
4231: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4233: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4234: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4235: else PetscCall(MatCopy_Basic(A, B, str));
4237: B->stencil.dim = A->stencil.dim;
4238: B->stencil.noc = A->stencil.noc;
4239: for (i = 0; i <= A->stencil.dim; i++) {
4240: B->stencil.dims[i] = A->stencil.dims[i];
4241: B->stencil.starts[i] = A->stencil.starts[i];
4242: }
4244: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4245: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4246: PetscFunctionReturn(PETSC_SUCCESS);
4247: }
4249: /*@C
4250: MatConvert - Converts a matrix to another matrix, either of the same
4251: or different type.
4253: Collective
4255: Input Parameters:
4256: + mat - the matrix
4257: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4258: same type as the original matrix.
4259: - reuse - denotes if the destination matrix is to be created or reused.
4260: Use `MAT_INPLACE_MATRIX` for in-place conversion (that is, when you want the input mat itself to be changed to contain the matrix in the new format); otherwise use
4261: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (the latter can only be used after a first call with `MAT_INITIAL_MATRIX` and causes the matrix space in M to be reused).
4263: Output Parameter:
4264: . M - pointer to place new matrix
4266: Level: intermediate
4268: Notes:
4269: `MatConvert()` first creates a new matrix and then copies the data from
4270: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4271: entries of one matrix to another already existing matrix context.
4273: Cannot be used to convert a sequential matrix to a parallel one or a parallel matrix to a sequential one, since
4274: the MPI communicator of the generated matrix is always the same as the communicator
4275: of the input matrix.
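
  Example Usage:
  A minimal sketch of the three reuse modes (assuming A is an assembled matrix):
.vb
  Mat B;
  MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &B); /* create a new dense copy of A       */
  MatConvert(A, MATDENSE, MAT_REUSE_MATRIX, &B);   /* refill B after A's values changed  */
  MatConvert(A, MATDENSE, MAT_INPLACE_MATRIX, &A); /* convert A itself to MATDENSE       */
.ve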
4277: .seealso: [](chapter_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4278: @*/
4279: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4280: {
4281: PetscBool sametype, issame, flg;
4282: PetscBool3 issymmetric, ishermitian;
4283: char convname[256], mtype[256];
4284: Mat B;
4286: PetscFunctionBegin;
4290: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4291: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4292: MatCheckPreallocated(mat, 1);
4294: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4295: if (flg) newtype = mtype;
4297: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4298: PetscCall(PetscStrcmp(newtype, "same", &issame));
4299: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4300: PetscCheck(!(reuse == MAT_REUSE_MATRIX) || !(mat == *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4302: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4303: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4304: PetscFunctionReturn(PETSC_SUCCESS);
4305: }
4307: /* Cache Mat options because some converters use MatHeaderReplace */
4308: issymmetric = mat->symmetric;
4309: ishermitian = mat->hermitian;
4311: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4312: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4313: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4314: } else {
4315: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4316: const char *prefix[3] = {"seq", "mpi", ""};
4317: PetscInt i;
4318: /*
4319: Order of precedence:
4320: 0) See if newtype is a superclass of the current matrix.
4321: 1) See if a specialized converter is known to the current matrix.
4322: 2) See if a specialized converter is known to the desired matrix class.
4323: 3) See if a good general converter is registered for the desired class
4324: (as of 6/27/03 only MATMPIADJ falls into this category).
4325: 4) See if a good general converter is known for the current matrix.
4326: 5) Use a really basic converter.
4327: */
4329: /* 0) See if newtype is a superclass of the current matrix.
4330: i.e. mat is mpiaij and newtype is aij */
4331: for (i = 0; i < 2; i++) {
4332: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4333: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4334: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4335: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4336: if (flg) {
4337: if (reuse == MAT_INPLACE_MATRIX) {
4338: PetscCall(PetscInfo(mat, "Early return\n"));
4339: PetscFunctionReturn(PETSC_SUCCESS);
4340: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4341: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4342: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4343: PetscFunctionReturn(PETSC_SUCCESS);
4344: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4345: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4346: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4347: PetscFunctionReturn(PETSC_SUCCESS);
4348: }
4349: }
4350: }
4351: /* 1) See if a specialized converter is known to the current matrix and the desired class */
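      /* The composed name built below has the form "MatConvert_<mat type>_<new type>_C",
         e.g. "MatConvert_seqaij_seqdense_C" for an illustrative seqaij-to-seqdense conversion */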
4352: for (i = 0; i < 3; i++) {
4353: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4354: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4355: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4356: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4357: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4358: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4359: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4360: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4361: if (conv) goto foundconv;
4362: }
4364: /* 2) See if a specialized converter is known to the desired matrix class. */
4365: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4366: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4367: PetscCall(MatSetType(B, newtype));
4368: for (i = 0; i < 3; i++) {
4369: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4370: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4371: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4372: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4373: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4374: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4375: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4376: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4377: if (conv) {
4378: PetscCall(MatDestroy(&B));
4379: goto foundconv;
4380: }
4381: }
4383: /* 3) See if a good general converter is registered for the desired class */
4384: conv = B->ops->convertfrom;
4385: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4386: PetscCall(MatDestroy(&B));
4387: if (conv) goto foundconv;
4389: /* 4) See if a good general converter is known for the current matrix */
4390: if (mat->ops->convert) conv = mat->ops->convert;
4391: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4392: if (conv) goto foundconv;
4394: /* 5) Use a really basic converter. */
4395: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4396: conv = MatConvert_Basic;
4398: foundconv:
4399: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4400: PetscCall((*conv)(mat, newtype, reuse, M));
4401: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4402: /* the block sizes must be same if the mappings are copied over */
4403: (*M)->rmap->bs = mat->rmap->bs;
4404: (*M)->cmap->bs = mat->cmap->bs;
4405: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4406: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4407: (*M)->rmap->mapping = mat->rmap->mapping;
4408: (*M)->cmap->mapping = mat->cmap->mapping;
4409: }
4410: (*M)->stencil.dim = mat->stencil.dim;
4411: (*M)->stencil.noc = mat->stencil.noc;
4412: for (i = 0; i <= mat->stencil.dim; i++) {
4413: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4414: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4415: }
4416: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4417: }
4418: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4420: /* Copy Mat options */
4421: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4422: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4423: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4424: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4425: PetscFunctionReturn(PETSC_SUCCESS);
4426: }
4428: /*@C
4429: MatFactorGetSolverType - Returns the name of the package providing the factorization routines
4431: Not Collective
4433: Input Parameter:
4434: . mat - the matrix, must be a factored matrix
4436: Output Parameter:
4437: . type - the string name of the package (do not free this string)
4439: Level: intermediate
4441: Fortran Note:
4442: Pass in an empty string and the package name will be copied into it. Make sure the string is long enough.
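
  Example Usage:
  A short sketch, assuming F was obtained from `MatGetFactor()`:
.vb
  MatSolverType stype;
  MatFactorGetSolverType(F, &stype);
  PetscPrintf(PETSC_COMM_WORLD, "Factorization provided by %s\n", stype);
.ve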
4444: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`
4445: @*/
4446: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4447: {
4448: PetscErrorCode (*conv)(Mat, MatSolverType *);
4450: PetscFunctionBegin;
4454: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4455: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4456: if (conv) PetscCall((*conv)(mat, type));
4457: else *type = MATSOLVERPETSC;
4458: PetscFunctionReturn(PETSC_SUCCESS);
4459: }
4461: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4462: struct _MatSolverTypeForSpecifcType {
4463: MatType mtype;
4464: /* no entry for MAT_FACTOR_NONE */
4465: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4466: MatSolverTypeForSpecifcType next;
4467: };
4469: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4470: struct _MatSolverTypeHolder {
4471: char *name;
4472: MatSolverTypeForSpecifcType handlers;
4473: MatSolverTypeHolder next;
4474: };
4476: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4478: /*@C
4479: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4481: Input Parameters:
4482: + package - name of the package, for example petsc or superlu
4483: . mtype - the matrix type that works with this package
4484: . ftype - the type of factorization supported by the package
4485: - createfactor - routine that will create the factored matrix ready to be used
4487: Level: developer
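
  Example Usage:
  A sketch of registering a hypothetical solver package "mysolver" for LU factorization of `MATSEQAIJ` matrices; MatGetFactor_SeqAIJ_MySolver is a user-supplied routine, not a PETSc function:
.vb
  extern PetscErrorCode MatGetFactor_SeqAIJ_MySolver(Mat, MatFactorType, Mat *);
  MatSolverTypeRegister("mysolver", MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_SeqAIJ_MySolver);
.ve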
4489: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`
4490: @*/
4491: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4492: {
4493: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4494: PetscBool flg;
4495: MatSolverTypeForSpecifcType inext, iprev = NULL;
4497: PetscFunctionBegin;
4498: PetscCall(MatInitializePackage());
4499: if (!next) {
4500: PetscCall(PetscNew(&MatSolverTypeHolders));
4501: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4502: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4503: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4504: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4505: PetscFunctionReturn(PETSC_SUCCESS);
4506: }
4507: while (next) {
4508: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4509: if (flg) {
4510: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4511: inext = next->handlers;
4512: while (inext) {
4513: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4514: if (flg) {
4515: inext->createfactor[(int)ftype - 1] = createfactor;
4516: PetscFunctionReturn(PETSC_SUCCESS);
4517: }
4518: iprev = inext;
4519: inext = inext->next;
4520: }
4521: PetscCall(PetscNew(&iprev->next));
4522: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4523: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4524: PetscFunctionReturn(PETSC_SUCCESS);
4525: }
4526: prev = next;
4527: next = next->next;
4528: }
4529: PetscCall(PetscNew(&prev->next));
4530: PetscCall(PetscStrallocpy(package, &prev->next->name));
4531: PetscCall(PetscNew(&prev->next->handlers));
4532: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4533: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4534: PetscFunctionReturn(PETSC_SUCCESS);
4535: }
4537: /*@C
4538: MatSolverTypeGet - Gets the function that creates the factor matrix if it exists
4540: Input Parameters:
4541: + type - name of the package, for example petsc or superlu
4542: . ftype - the type of factorization supported by the type
4543: - mtype - the matrix type that works with this type
4545: Output Parameters:
4546: + foundtype - `PETSC_TRUE` if the type was registered
4547: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4548: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4550: Level: developer
4552: .seealso: [](chapter_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`
4553: @*/
4554: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat, MatFactorType, Mat *))
4555: {
4556: MatSolverTypeHolder next = MatSolverTypeHolders;
4557: PetscBool flg;
4558: MatSolverTypeForSpecifcType inext;
4560: PetscFunctionBegin;
4561: if (foundtype) *foundtype = PETSC_FALSE;
4562: if (foundmtype) *foundmtype = PETSC_FALSE;
4563: if (createfactor) *createfactor = NULL;
4565: if (type) {
4566: while (next) {
4567: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4568: if (flg) {
4569: if (foundtype) *foundtype = PETSC_TRUE;
4570: inext = next->handlers;
4571: while (inext) {
4572: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4573: if (flg) {
4574: if (foundmtype) *foundmtype = PETSC_TRUE;
4575: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4576: PetscFunctionReturn(PETSC_SUCCESS);
4577: }
4578: inext = inext->next;
4579: }
4580: }
4581: next = next->next;
4582: }
4583: } else {
4584: while (next) {
4585: inext = next->handlers;
4586: while (inext) {
4587: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4588: if (flg && inext->createfactor[(int)ftype - 1]) {
4589: if (foundtype) *foundtype = PETSC_TRUE;
4590: if (foundmtype) *foundmtype = PETSC_TRUE;
4591: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4592: PetscFunctionReturn(PETSC_SUCCESS);
4593: }
4594: inext = inext->next;
4595: }
4596: next = next->next;
4597: }
4598: /* try with base classes inext->mtype */
4599: next = MatSolverTypeHolders;
4600: while (next) {
4601: inext = next->handlers;
4602: while (inext) {
4603: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4604: if (flg && inext->createfactor[(int)ftype - 1]) {
4605: if (foundtype) *foundtype = PETSC_TRUE;
4606: if (foundmtype) *foundmtype = PETSC_TRUE;
4607: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4608: PetscFunctionReturn(PETSC_SUCCESS);
4609: }
4610: inext = inext->next;
4611: }
4612: next = next->next;
4613: }
4614: }
4615: PetscFunctionReturn(PETSC_SUCCESS);
4616: }
4618: PetscErrorCode MatSolverTypeDestroy(void)
4619: {
4620: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4621: MatSolverTypeForSpecifcType inext, iprev;
4623: PetscFunctionBegin;
4624: while (next) {
4625: PetscCall(PetscFree(next->name));
4626: inext = next->handlers;
4627: while (inext) {
4628: PetscCall(PetscFree(inext->mtype));
4629: iprev = inext;
4630: inext = inext->next;
4631: PetscCall(PetscFree(iprev));
4632: }
4633: prev = next;
4634: next = next->next;
4635: PetscCall(PetscFree(prev));
4636: }
4637: MatSolverTypeHolders = NULL;
4638: PetscFunctionReturn(PETSC_SUCCESS);
4639: }
4641: /*@C
4642: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4644: Logically Collective
4646: Input Parameter:
4647: . mat - the matrix
4649: Output Parameter:
4650: . flg - `PETSC_TRUE` if uses the ordering
4652: Level: developer
4654: Note:
4655: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4656: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4658: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4659: @*/
4660: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4661: {
4662: PetscFunctionBegin;
4663: *flg = mat->canuseordering;
4664: PetscFunctionReturn(PETSC_SUCCESS);
4665: }
4667: /*@C
4668: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4670: Logically Collective
4672: Input Parameters:
4673: + mat - the matrix obtained with `MatGetFactor()`
4674: - ftype - the factorization type to be used
4676: Output Parameter:
4677: . otype - the preferred ordering type
4679: Level: developer
4681: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4682: @*/
4683: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4684: {
4685: PetscFunctionBegin;
4686: *otype = mat->preferredordering[ftype];
4687: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4688: PetscFunctionReturn(PETSC_SUCCESS);
4689: }
4691: /*@C
4692: MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic()
4694: Collective
4696: Input Parameters:
4697: + mat - the matrix
4698: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4699: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4701: Output Parameter:
4702: . f - the factor matrix used with MatXXFactorSymbolic() calls
4704: Options Database Key:
4705: . -mat_factor_bind_factorization <host, device> - Where to perform the matrix factorization. The default is device (which might consume more device memory;
4706: one can choose host to save device memory). Currently only supported for `MATSEQAIJCUSPARSE` matrices.
4708: Level: intermediate
4710: Notes:
4711: Users usually access the factorization solvers via `KSP`
4713: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4714: such as pastix, superlu, mumps etc.
4716: PETSc must have been configured to use the external solver, using the option --download-package
4718: Some of the packages have options for controlling the factorization, these are in the form -prefix_mat_packagename_packageoption
4719: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly one can set the prefix by
4720: calling `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
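
  Example Usage:
  A minimal sketch of a direct LU solve with the builtin PETSc solver (error checking omitted; b and x are assumed to be conforming Vecs):
.vb
  Mat           F;
  IS            rperm, cperm;
  MatFactorInfo info;
  MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F);
  MatGetOrdering(A, MATORDERINGND, &rperm, &cperm);
  MatFactorInfoInitialize(&info);
  MatLUFactorSymbolic(F, A, rperm, cperm, &info);
  MatLUFactorNumeric(F, A, &info);
  MatSolve(F, b, x);
.ve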
4722: Developer Note:
4723: This should actually be called `MatCreateFactor()` since it creates a new factor object
4725: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`,
4726: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4727: @*/
4728: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4729: {
4730: PetscBool foundtype, foundmtype;
4731: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4733: PetscFunctionBegin;
4737: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4738: MatCheckPreallocated(mat, 1);
4740: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4741: if (!foundtype) {
4742: if (type) {
4743: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4744: ((PetscObject)mat)->type_name, type);
4745: } else {
4746: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4747: }
4748: }
4749: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4750: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4752: PetscCall((*conv)(mat, ftype, f));
4753: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4754: PetscFunctionReturn(PETSC_SUCCESS);
4755: }
4757: /*@C
4758: MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver type and factor type
4760: Not Collective
4762: Input Parameters:
4763: + mat - the matrix
4764: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4765: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4767: Output Parameter:
4768: . flg - PETSC_TRUE if the factorization is available
4770: Level: intermediate
4772: Notes:
4773: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4774: such as pastix, superlu, mumps etc.
4776: PETSc must have been configured to use the external solver, using the option --download-package
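
  Example Usage:
  A sketch that falls back to the builtin PETSc solver when an external package is not available in this build:
.vb
  Mat       F;
  PetscBool flg;
  MatGetFactorAvailable(A, MATSOLVERSUPERLU, MAT_FACTOR_LU, &flg);
  MatGetFactor(A, flg ? MATSOLVERSUPERLU : MATSOLVERPETSC, MAT_FACTOR_LU, &F);
.ve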
4778: Developer Note:
4779: This should actually be called MatCreateFactorAvailable() since MatGetFactor() creates a new factor object
4781: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactor()`, `MatSolverTypeRegister()`,
4782: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4783: @*/
4784: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4785: {
4786: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4788: PetscFunctionBegin;
4793: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4794: MatCheckPreallocated(mat, 1);
4796: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4797: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4798: PetscFunctionReturn(PETSC_SUCCESS);
4799: }
4801: /*@
4802: MatDuplicate - Duplicates a matrix including the non-zero structure.
4804: Collective
4806: Input Parameters:
4807: + mat - the matrix
4808: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4809: See the manual page for `MatDuplicateOption()` for an explanation of these options.
4811: Output Parameter:
4812: . M - pointer to place new matrix
4814: Level: intermediate
4816: Notes:
4817: You cannot change the nonzero pattern for the parent or child matrix if you use `MAT_SHARE_NONZERO_PATTERN`.
4819: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4821: When the original mat is a product of a matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the simple matrix data structure of mat
4822: is duplicated and the internal data structures created for the reuse of previous matrix operations are not duplicated.
4823: The user should not use `MatDuplicate()` to create a new matrix M if M is intended to be reused as the product of a matrix operation.
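
  Example Usage:
  A short sketch of the two common duplication modes:
.vb
  Mat B, C;
  MatDuplicate(A, MAT_COPY_VALUES, &B);           /* independent copy of structure and values  */
  MatDuplicate(A, MAT_SHARE_NONZERO_PATTERN, &C); /* new values, nonzero pattern shared with A */
.ve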
4825: .seealso: [](chapter_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4826: @*/
4827: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4828: {
4829: Mat B;
4830: VecType vtype;
4831: PetscInt i;
4832: PetscObject dm;
4833: void (*viewf)(void);
4835: PetscFunctionBegin;
4839: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4840: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4841: MatCheckPreallocated(mat, 1);
4843: *M = NULL;
4844: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4845: PetscUseTypeMethod(mat, duplicate, op, M);
4846: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4847: B = *M;
4849: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4850: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4851: PetscCall(MatGetVecType(mat, &vtype));
4852: PetscCall(MatSetVecType(B, vtype));
4854: B->stencil.dim = mat->stencil.dim;
4855: B->stencil.noc = mat->stencil.noc;
4856: for (i = 0; i <= mat->stencil.dim; i++) {
4857: B->stencil.dims[i] = mat->stencil.dims[i];
4858: B->stencil.starts[i] = mat->stencil.starts[i];
4859: }
4861: B->nooffproczerorows = mat->nooffproczerorows;
4862: B->nooffprocentries = mat->nooffprocentries;
4864: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
4865: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
4866: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4867: PetscFunctionReturn(PETSC_SUCCESS);
4868: }
4870: /*@
4871: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
4873: Logically Collective
4875: Input Parameter:
4876: . mat - the matrix
4878: Output Parameter:
4879: . v - the diagonal of the matrix
4881: Level: intermediate
4883: Note:
4884: Currently only correct in parallel for square matrices.
4886: .seealso: [](chapter_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
4887: @*/
4888: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
4889: {
4890: PetscFunctionBegin;
4894: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4895: MatCheckPreallocated(mat, 1);
4897: PetscUseTypeMethod(mat, getdiagonal, v);
4898: PetscCall(PetscObjectStateIncrease((PetscObject)v));
4899: PetscFunctionReturn(PETSC_SUCCESS);
4900: }
4902: /*@C
4903: MatGetRowMin - Gets the minimum value (of the real part) of each
4904: row of the matrix
4906: Logically Collective
4908: Input Parameter:
4909: . mat - the matrix
4911: Output Parameters:
4912: + v - the vector for storing the minimums
4913: - idx - the indices of the column found for each row (optional)
4915: Level: intermediate
4917: Note:
4918: The results of this call are the same as if one converted the matrix to dense format
4919: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
4921: This code is only implemented for a couple of matrix formats.
4923: .seealso: [](chapter_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
4924: `MatGetRowMax()`
4925: @*/
4926: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
4927: {
4928: PetscFunctionBegin;
4932: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4934: if (!mat->cmap->N) {
4935: PetscCall(VecSet(v, PETSC_MAX_REAL));
4936: if (idx) {
4937: PetscInt i, m = mat->rmap->n;
4938: for (i = 0; i < m; i++) idx[i] = -1;
4939: }
4940: } else {
4941: MatCheckPreallocated(mat, 1);
4942: }
4943: PetscUseTypeMethod(mat, getrowmin, v, idx);
4944: PetscCall(PetscObjectStateIncrease((PetscObject)v));
4945: PetscFunctionReturn(PETSC_SUCCESS);
4946: }
4948: /*@C
4949: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
4950: row of the matrix
4952: Logically Collective
4954: Input Parameter:
4955: . mat - the matrix
4957: Output Parameters:
4958: + v - the vector for storing the minimums
4959: - idx - the indices of the column found for each row (or `NULL` if not needed)
4961: Level: intermediate
4963: Notes:
4964: If a row is completely empty or has only 0.0 values, then the idx[] value for that
4965: row is 0 (the first column).
4967: This code is only implemented for a couple of matrix formats.
4969: .seealso: [](chapter_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
4970: @*/
4971: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
4972: {
4973: PetscFunctionBegin;
4977: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4978: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4980: if (!mat->cmap->N) {
4981: PetscCall(VecSet(v, 0.0));
4982: if (idx) {
4983: PetscInt i, m = mat->rmap->n;
4984: for (i = 0; i < m; i++) idx[i] = -1;
4985: }
4986: } else {
4987: MatCheckPreallocated(mat, 1);
4988: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
4989: PetscUseTypeMethod(mat, getrowminabs, v, idx);
4990: }
4991: PetscCall(PetscObjectStateIncrease((PetscObject)v));
4992: PetscFunctionReturn(PETSC_SUCCESS);
4993: }
4995: /*@C
4996: MatGetRowMax - Gets the maximum value (of the real part) of each
4997: row of the matrix
4999: Logically Collective
5001: Input Parameter:
5002: . mat - the matrix
5004: Output Parameters:
5005: + v - the vector for storing the maximums
5006: - idx - the indices of the column found for each row (optional)
5008: Level: intermediate
5010: Notes:
5011: The results of this call are the same as if one converted the matrix to dense format
5012: and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
5014: This code is only implemented for a couple of matrix formats.
5016: .seealso: [](chapter_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5017: @*/
5018: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5019: {
5020: PetscFunctionBegin;
5024: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5026: if (!mat->cmap->N) {
5027: PetscCall(VecSet(v, PETSC_MIN_REAL));
5028: if (idx) {
5029: PetscInt i, m = mat->rmap->n;
5030: for (i = 0; i < m; i++) idx[i] = -1;
5031: }
5032: } else {
5033: MatCheckPreallocated(mat, 1);
5034: PetscUseTypeMethod(mat, getrowmax, v, idx);
5035: }
5036: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5037: PetscFunctionReturn(PETSC_SUCCESS);
5038: }
5040: /*@C
5041: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5042: row of the matrix
5044: Logically Collective
5046: Input Parameter:
5047: . mat - the matrix
5049: Output Parameters:
5050: + v - the vector for storing the maximums
5051: - idx - the indices of the column found for each row (or `NULL` if not needed)
5053: Level: intermediate
5055: Notes:
5056: If a row is completely empty or has only 0.0 values, then the idx[] value for that
5057: row is 0 (the first column).
5059: This code is only implemented for a couple of matrix formats.
5061: .seealso: [](chapter_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5062: @*/
5063: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5064: {
5065: PetscFunctionBegin;
5069: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5071: if (!mat->cmap->N) {
5072: PetscCall(VecSet(v, 0.0));
5073: if (idx) {
5074: PetscInt i, m = mat->rmap->n;
5075: for (i = 0; i < m; i++) idx[i] = -1;
5076: }
5077: } else {
5078: MatCheckPreallocated(mat, 1);
5079: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5080: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5081: }
5082: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5083: PetscFunctionReturn(PETSC_SUCCESS);
5084: }
5086: /*@
5087: MatGetRowSum - Gets the sum of each row of the matrix
5089: Logically or Neighborhood Collective
5091: Input Parameter:
5092: . mat - the matrix
5094: Output Parameter:
5095: . v - the vector for storing the sum of rows
5097: Level: intermediate
5099: Notes:
5100: This code is slow since it is not currently specialized for different formats
5102: .seealso: [](chapter_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`
5103: @*/
5104: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5105: {
5106: Vec ones;
5108: PetscFunctionBegin;
5112: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5113: MatCheckPreallocated(mat, 1);
5114: PetscCall(MatCreateVecs(mat, &ones, NULL));
5115: PetscCall(VecSet(ones, 1.));
5116: PetscCall(MatMult(mat, ones, v));
5117: PetscCall(VecDestroy(&ones));
5118: PetscFunctionReturn(PETSC_SUCCESS);
5119: }
5121: /*@
5122: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5123: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5125: Collective
5127: Input Parameter:
5128: . mat - the matrix to provide the transpose
5130: Output Parameter:
5131: . B - the matrix that will contain the transpose; it MUST have the nonzero structure of the transpose of mat or the code will crash or generate incorrect results
5133: Level: advanced
5135: Note:
5136: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5137: routine allows bypassing that call.
5139: .seealso: [](chapter_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5140: @*/
5141: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5142: {
5143: PetscContainer rB = NULL;
5144: MatParentState *rb = NULL;
5146: PetscFunctionBegin;
5147: PetscCall(PetscNew(&rb));
5148: rb->id = ((PetscObject)mat)->id;
5149: rb->state = 0;
5150: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5151: PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)B), &rB));
5152: PetscCall(PetscContainerSetPointer(rB, rb));
5153: PetscCall(PetscContainerSetUserDestroy(rB, PetscContainerUserDestroyDefault));
5154: PetscCall(PetscObjectCompose((PetscObject)B, "MatTransposeParent", (PetscObject)rB));
5155: PetscCall(PetscObjectDereference((PetscObject)rB));
5156: PetscFunctionReturn(PETSC_SUCCESS);
5157: }
5159: /*@
5160: MatTranspose - Computes an in-place or out-of-place transpose of a matrix.
5162: Collective
5164: Input Parameters:
5165: + mat - the matrix to transpose
5166: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5168: Output Parameter:
5169: . B - the transpose
5171: Level: intermediate
5173: Notes:
5174: If you use `MAT_INPLACE_MATRIX` then you must pass in &mat for B
5176: `MAT_REUSE_MATRIX` uses the B matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX`. If you already have a matrix to contain the
5177: transpose, call `MatTransposeSetPrecursor`(mat,B) before calling this routine.
5179: If the nonzero structure of mat changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5181: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose, but don't need the storage to be changed.
5183: If mat is unchanged from the last call this function returns immediately without recomputing the result
5185: If you only need the symbolic transpose, and not the numerical values, use `MatTransposeSymbolic()`
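
  Example Usage:
  A minimal sketch of the initial/reuse workflow:
.vb
  Mat At;
  MatTranspose(A, MAT_INITIAL_MATRIX, &At); /* symbolic and numeric transpose   */
  /* ... change the numerical values of A, keeping its nonzero structure ... */
  MatTranspose(A, MAT_REUSE_MATRIX, &At);   /* refill At without new allocation */
.ve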
5187: .seealso: [](chapter_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5188: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5189: @*/
5190: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5191: {
5192: PetscContainer rB = NULL;
5193: MatParentState *rb = NULL;
5195: PetscFunctionBegin;
5198: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5199: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5200: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5201: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5202: MatCheckPreallocated(mat, 1);
5203: if (reuse == MAT_REUSE_MATRIX) {
5204: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5205: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5206: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5207: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5208: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5209: }
5211: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5212: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5213: PetscUseTypeMethod(mat, transpose, reuse, B);
5214: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5215: }
5216: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5218: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5219: if (reuse != MAT_INPLACE_MATRIX) {
5220: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5221: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5222: rb->state = ((PetscObject)mat)->state;
5223: rb->nonzerostate = mat->nonzerostate;
5224: }
5225: PetscFunctionReturn(PETSC_SUCCESS);
5226: }
5228: /*@
5229: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5231: Collective
5233: Input Parameter:
5234: . A - the matrix to transpose
5236: Output Parameter:
5237: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5238: numerical portion.
5240: Level: intermediate
5242: Note:
5243: This is not supported for many matrix types, use `MatTranspose()` in those cases
5245: .seealso: [](chapter_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5246: @*/
5247: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5248: {
5249: PetscFunctionBegin;
5252: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5253: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5254: PetscCheck(A->ops->transposesymbolic, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
5255: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5256: PetscCall((*A->ops->transposesymbolic)(A, B));
5257: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5259: PetscCall(MatTransposeSetPrecursor(A, *B));
5260: PetscFunctionReturn(PETSC_SUCCESS);
5261: }
5263: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5264: {
5265: PetscContainer rB;
5266: MatParentState *rb;
5268: PetscFunctionBegin;
5271: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5272: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5273: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5274: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5275: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5276: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5277: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5278: PetscFunctionReturn(PETSC_SUCCESS);
5279: }
5281: /*@
5282: MatIsTranspose - Test whether a matrix is another one's transpose,
5283: or its own, in which case it tests symmetry.
5285: Collective
5287: Input Parameters:
5288: + A - the matrix to test
5289: . B - the matrix to test against, this can equal the first parameter
5290: - tol - tolerance, differences between entries smaller than this are counted as zero
5292: Output Parameter:
5293: . flg - the result
5295: Level: intermediate
5297: Notes:
5298: Only available for `MATAIJ` matrices.
5300: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5301: test involves parallel copies of the block-offdiagonal parts of the matrix.
5303: .seealso: [](chapter_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5304: @*/
5305: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5306: {
5307: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5309: PetscFunctionBegin;
5313: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5314: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5315: *flg = PETSC_FALSE;
5316: if (f && g) {
5317: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5318: PetscCall((*f)(A, B, tol, flg));
5319: } else {
5320: MatType mattype;
5322: PetscCall(MatGetType(f ? B : A, &mattype));
5323: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5324: }
5325: PetscFunctionReturn(PETSC_SUCCESS);
5326: }
5328: /*@
5329: MatHermitianTranspose - Computes an in-place or out-of-place Hermitian transpose (complex conjugate transpose) of a matrix.
5331: Collective
5333: Input Parameters:
5334: + mat - the matrix to transpose and complex conjugate
5335: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5337: Output Parameter:
5338: . B - the Hermitian transpose
5340: Level: intermediate
5342: .seealso: [](chapter_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5343: @*/
5344: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5345: {
5346: PetscFunctionBegin;
5347: PetscCall(MatTranspose(mat, reuse, B));
5348: #if defined(PETSC_USE_COMPLEX)
5349: PetscCall(MatConjugate(*B));
5350: #endif
5351: PetscFunctionReturn(PETSC_SUCCESS);
5352: }
5354: /*@
5355: MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose.
5357: Collective
5359: Input Parameters:
5360: + A - the matrix to test
5361: . B - the matrix to test against, this can equal the first parameter
5362: - tol - tolerance, differences between entries smaller than this are counted as zero
5364: Output Parameter:
5365: . flg - the result
5367: Level: intermediate
5369: Notes:
5370: Only available for `MATAIJ` matrices.
5372: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5373: test involves parallel copies of the block-offdiagonal parts of the matrix.
5376: .seealso: [](chapter_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5377: @*/
5378: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5379: {
5380: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5382: PetscFunctionBegin;
5386: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5387: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5388: if (f && g) {
5389: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5390: PetscCall((*f)(A, B, tol, flg));
5391: }
5392: PetscFunctionReturn(PETSC_SUCCESS);
5393: }
5395: /*@
5396: MatPermute - Creates a new matrix with rows and columns permuted from the
5397: original.
5399: Collective
5401: Input Parameters:
5402: + mat - the matrix to permute
5403: . row - row permutation, each processor supplies only the permutation for its rows
5404: - col - column permutation, each processor supplies only the permutation for its columns
5406: Output Parameter:
5407: . B - the permuted matrix
5409: Level: advanced
5411: Note:
5412: The index sets map from row/col of permuted matrix to row/col of original matrix.
5413: The index sets should be on the same communicator as mat and have the same local sizes.
5415: Developer Note:
5416: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5417: exploit the fact that row and col are permutations, consider implementing the
5418: more general `MatCreateSubMatrix()` instead.
5420: .seealso: [](chapter_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5421: @*/
5422: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5423: {
5424: PetscFunctionBegin;
5430: PetscCheckSameComm(mat, 1, row, 2);
5431: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5432: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5433: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5434: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5435: MatCheckPreallocated(mat, 1);
5437: if (mat->ops->permute) {
5438: PetscUseTypeMethod(mat, permute, row, col, B);
5439: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5440: } else {
5441: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5442: }
5443: PetscFunctionReturn(PETSC_SUCCESS);
5444: }
5446: /*@
5447: MatEqual - Compares two matrices.
5449: Collective
5451: Input Parameters:
5452: + A - the first matrix
5453: - B - the second matrix
5455: Output Parameter:
5456: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5458: Level: intermediate
5460: .seealso: [](chapter_matrices), `Mat`
5461: @*/
5462: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5463: {
5464: PetscFunctionBegin;
5470: PetscCheckSameComm(A, 1, B, 2);
5471: MatCheckPreallocated(A, 1);
5472: MatCheckPreallocated(B, 2);
5473: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5474: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5475: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5476: B->cmap->N);
5477: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5478: PetscUseTypeMethod(A, equal, B, flg);
5479: } else {
5480: PetscCall(MatMultEqual(A, B, 10, flg));
5481: }
5482: PetscFunctionReturn(PETSC_SUCCESS);
5483: }
5485: /*@
5486: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5487: matrices that are stored as vectors. Either of the two scaling
5488: matrices can be `NULL`.
5490: Collective
5492: Input Parameters:
5493: + mat - the matrix to be scaled
5494: . l - the left scaling vector (or `NULL`)
5495: - r - the right scaling vector (or `NULL`)
5497: Level: intermediate
5499: Note:
5500: `MatDiagonalScale()` computes A = LAR, where
5501: L is a diagonal matrix (stored as a vector) and R is a diagonal matrix (stored as a vector).
5502: L scales the rows of the matrix and R scales the columns of the matrix.
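
  Example Usage:
  A small sketch of symmetric (Jacobi-like) scaling of a square matrix, using the same vector on both sides:
.vb
  Vec d;
  MatCreateVecs(A, &d, NULL);
  MatGetDiagonal(A, d);
  VecSqrtAbs(d);
  VecReciprocal(d);
  MatDiagonalScale(A, d, d); /* A <- D^{-1/2} A D^{-1/2} with D = diag(A) */
.ve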
5504: .seealso: [](chapter_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5505: @*/
5506: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5507: {
5508: PetscFunctionBegin;
5511: if (l) {
5513: PetscCheckSameComm(mat, 1, l, 2);
5514: }
5515: if (r) {
5517: PetscCheckSameComm(mat, 1, r, 3);
5518: }
5519: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5520: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5521: MatCheckPreallocated(mat, 1);
5522: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5524: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5525: PetscUseTypeMethod(mat, diagonalscale, l, r);
5526: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5527: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5528: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5529: PetscFunctionReturn(PETSC_SUCCESS);
5530: }
5532: /*@
5533: MatScale - Scales all elements of a matrix by a given number.
5535: Logically Collective
5537: Input Parameters:
5538: + mat - the matrix to be scaled
5539: - a - the scaling value
5541: Level: intermediate
5543: .seealso: [](chapter_matrices), `Mat`, `MatDiagonalScale()`
5544: @*/
5545: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5546: {
5547: PetscFunctionBegin;
5550: PetscCheck(a == (PetscScalar)1.0 || mat->ops->scale, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
5551: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5552: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5554: MatCheckPreallocated(mat, 1);
5556: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5557: if (a != (PetscScalar)1.0) {
5558: PetscUseTypeMethod(mat, scale, a);
5559: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5560: }
5561: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5562: PetscFunctionReturn(PETSC_SUCCESS);
5563: }
5565: /*@
5566: MatNorm - Calculates various norms of a matrix.
5568: Collective
5570: Input Parameters:
5571: + mat - the matrix
5572: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5574: Output Parameter:
5575: . nrm - the resulting norm
5577: Level: intermediate
5579: .seealso: [](chapter_matrices), `Mat`
5580: @*/
5581: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5582: {
5583: PetscFunctionBegin;
5588: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5589: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5590: MatCheckPreallocated(mat, 1);
5592: PetscUseTypeMethod(mat, norm, type, nrm);
5593: PetscFunctionReturn(PETSC_SUCCESS);
5594: }
5596: /*
5597: This variable is used to prevent counting of MatAssemblyBegin() calls that
5598: are made from within a MatAssemblyEnd().
5599: */
5600: static PetscInt MatAssemblyEnd_InUse = 0;
5601: /*@
5602: MatAssemblyBegin - Begins assembling the matrix. This routine should
5603: be called after completing all calls to `MatSetValues()`.
5605: Collective
5607: Input Parameters:
5608: + mat - the matrix
5609: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5611: Level: beginner
5613: Notes:
5614: `MatSetValues()` generally caches the values that belong to other MPI ranks. The matrix is ready to
5615: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5617: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5618: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5619: using the matrix.
5621: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5622: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`; that is
5623: a global collective operation requiring all processes that share the matrix.
5625: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine is compressed
5626: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5627: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
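  A minimal usage sketch (assuming `mat` has been created, sized, and preallocated elsewhere; the indices and value are illustrative only):
.vb
  PetscInt    row = 0, col = 0;
  PetscScalar v   = 1.0;
  PetscCall(MatSetValues(mat, 1, &row, 1, &col, &v, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));  // mat is now ready for use, e.g. with MatMult()
.ve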
5629: .seealso: [](chapter_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5630: @*/
5631: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5632: {
5633: PetscFunctionBegin;
5636: MatCheckPreallocated(mat, 1);
5637: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix.\nDid you forget to call MatSetUnfactored()?");
5638: if (mat->assembled) {
5639: mat->was_assembled = PETSC_TRUE;
5640: mat->assembled = PETSC_FALSE;
5641: }
5643: if (!MatAssemblyEnd_InUse) {
5644: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5645: PetscTryTypeMethod(mat, assemblybegin, type);
5646: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5647: } else PetscTryTypeMethod(mat, assemblybegin, type);
5648: PetscFunctionReturn(PETSC_SUCCESS);
5649: }
5651: /*@
5652: MatAssembled - Indicates if a matrix has been assembled and is ready for
5653: use; for example, in matrix-vector product.
5655: Not Collective
5657: Input Parameter:
5658: . mat - the matrix
5660: Output Parameter:
5661: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5663: Level: advanced
5665: .seealso: [](chapter_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5666: @*/
5667: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5668: {
5669: PetscFunctionBegin;
5672: *assembled = mat->assembled;
5673: PetscFunctionReturn(PETSC_SUCCESS);
5674: }
5676: /*@
5677: MatAssemblyEnd - Completes assembling the matrix. This routine should
5678: be called after `MatAssemblyBegin()`.
5680: Collective
5682: Input Parameters:
5683: + mat - the matrix
5684: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5686: Options Database Keys:
5687: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5688: . -mat_view ::ascii_info_detail - Prints more detailed info
5689: . -mat_view - Prints matrix in ASCII format
5690: . -mat_view ::ascii_matlab - Prints matrix in Matlab format
5691: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5692: . -display <name> - Sets display name (default is host)
5693: . -draw_pause <sec> - Sets number of seconds to pause after display
5694: . -mat_view socket - Sends matrix to socket, can be accessed from Matlab (See [Using MATLAB with PETSc](ch_matlab))
5695: . -viewer_socket_machine <machine> - Machine to use for socket
5696: . -viewer_socket_port <port> - Port number to use for socket
5697: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5699: Level: beginner
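  A minimal sketch of a flush assembly when switching insert modes (assuming `mat` is created and preallocated, and `row`, `col`, and `v` are set appropriately elsewhere):
.vb
  PetscCall(MatSetValues(mat, 1, &row, 1, &col, &v, ADD_VALUES));
  PetscCall(MatAssemblyBegin(mat, MAT_FLUSH_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FLUSH_ASSEMBLY));  // flush before changing to INSERT_VALUES
  PetscCall(MatSetValues(mat, 1, &row, 1, &col, &v, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve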
5701: .seealso: [](chapter_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5702: @*/
5703: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5704: {
5705: static PetscInt inassm = 0;
5706: PetscBool flg = PETSC_FALSE;
5708: PetscFunctionBegin;
5712: inassm++;
5713: MatAssemblyEnd_InUse++;
5714: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5715: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5716: PetscTryTypeMethod(mat, assemblyend, type);
5717: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5718: } else PetscTryTypeMethod(mat, assemblyend, type);
5720: /* Flush assembly is not a true assembly */
5721: if (type != MAT_FLUSH_ASSEMBLY) {
5722: if (mat->num_ass) {
5723: if (!mat->symmetry_eternal) {
5724: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5725: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5726: }
5727: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5728: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5729: }
5730: mat->num_ass++;
5731: mat->assembled = PETSC_TRUE;
5732: mat->ass_nonzerostate = mat->nonzerostate;
5733: }
5735: mat->insertmode = NOT_SET_VALUES;
5736: MatAssemblyEnd_InUse--;
5737: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5738: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5739: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5741: if (mat->checksymmetryonassembly) {
5742: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5743: if (flg) {
5744: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5745: } else {
5746: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5747: }
5748: }
5749: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5750: }
5751: inassm--;
5752: PetscFunctionReturn(PETSC_SUCCESS);
5753: }
5755: /*@
5756: MatSetOption - Sets a parameter option for a matrix. Some options
5757: may be specific to certain storage formats. Some options
5758: determine how values will be inserted (or added). Sorted,
5759: row-oriented input will generally assemble the fastest. The default
5760: is row-oriented.
5762: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5764: Input Parameters:
5765: + mat - the matrix
5766: . option - the option, one of those listed below (and possibly others)
5767: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5769: Options Describing Matrix Structure:
5770: + `MAT_SPD` - symmetric positive definite
5771: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5772: . `MAT_HERMITIAN` - transpose is the complex conjugation
5773: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5774: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5775: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5776: - `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5778: These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that these properties
5779: do not need to be computed (usually at a high cost)
5781: Options For Use with `MatSetValues()`:
5782: Insert a logically dense subblock, which can be
5783: . `MAT_ROW_ORIENTED` - row-oriented (default)
5785: These options reflect the data you pass in with `MatSetValues()`; they have
5786: nothing to do with how the data is stored internally in the matrix
5787: data structure.
5789: When (re)assembling a matrix, we can restrict the input for
5790: efficiency/debugging purposes. These options include
5791: + `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5792: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5793: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5794: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5795: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5796: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5797: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5798: performance for very large process counts.
5799: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5800: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5801: functions, instead sending only neighbor messages.
5803: Level: intermediate
5805: Notes:
5806: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
5808: Some options are relevant only for particular matrix types and
5809: are thus ignored by others. Other options are not supported by
5810: certain matrix types and will generate an error message if set.
5812: If using Fortran to compute a matrix, one may need to
5813: use the column-oriented option (or convert to the row-oriented
5814: format).
5816: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
5817: that would generate a new entry in the nonzero structure is instead
5818: ignored. Thus, if memory has not already been allocated for this particular
5819: data, then the insertion is ignored. For dense matrices, in which
5820: the entire array is allocated, no entries are ever ignored.
5821: Set after the first `MatAssemblyEnd()`. If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processing requires one less global reduction
5823: `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5824: that would generate a new entry in the nonzero structure instead produces
5825: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processing requires one less global reduction
5827: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5828: that would generate a new entry that has not been preallocated will
5829: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
5830: only.) This is a useful flag when debugging matrix memory preallocation.
5831: If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processing requires one less global reduction
5833: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
5834: other processors should be dropped, rather than stashed.
5835: This is useful if you know that the "owning" processor is also
5836: always generating the correct matrix entries, so that PETSc need
5837: not transfer duplicate entries generated on another processor.
5839: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
5840: searches during matrix assembly. When this flag is set, the hash table
5841: is created during the first matrix assembly. This hash table is
5842: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
5843: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
5844: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
5845: supported by `MATMPIBAIJ` format only.
5847: `MAT_KEEP_NONZERO_PATTERN` indicates that when `MatZeroRows()` is called the zeroed entries
5848: are kept in the nonzero structure
5850: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
5851: a zero location in the matrix
5853: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
5855: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
5856: zero row routines and thus improves performance for very large process counts.
5858: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
5859: part of the matrix (since they should match the upper triangular part).
5861: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
5862: single call to `MatSetValues()`, preallocation is perfect, row oriented, `INSERT_VALUES` is used. Common
5863: with finite difference schemes with non-periodic boundary conditions.
5865: Developer Note:
5866: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
5867: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRIC` or `MAT_SPD` would need to be changed back
5868: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
5869: not changed.
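  A minimal usage sketch (assuming `mat` has been created and its type set elsewhere):
.vb
  PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));                  // the values form a symmetric matrix
  PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));           // and will remain symmetric through future changes
  PetscCall(MatSetOption(mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE)); // error instead of silently allocating during assembly
.ve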
5871: .seealso: [](chapter_matrices), `MatOption`, `Mat`, `MatGetOption()`
5872: @*/
5873: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
5874: {
5875: PetscFunctionBegin;
5877: if (op > 0) {
5880: }
5882: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Option %d is out of range", (int)op);
5884: switch (op) {
5885: case MAT_FORCE_DIAGONAL_ENTRIES:
5886: mat->force_diagonals = flg;
5887: PetscFunctionReturn(PETSC_SUCCESS);
5888: case MAT_NO_OFF_PROC_ENTRIES:
5889: mat->nooffprocentries = flg;
5890: PetscFunctionReturn(PETSC_SUCCESS);
5891: case MAT_SUBSET_OFF_PROC_ENTRIES:
5892: mat->assembly_subset = flg;
5893: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
5894: #if !defined(PETSC_HAVE_MPIUNI)
5895: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
5896: #endif
5897: mat->stash.first_assembly_done = PETSC_FALSE;
5898: }
5899: PetscFunctionReturn(PETSC_SUCCESS);
5900: case MAT_NO_OFF_PROC_ZERO_ROWS:
5901: mat->nooffproczerorows = flg;
5902: PetscFunctionReturn(PETSC_SUCCESS);
5903: case MAT_SPD:
5904: if (flg) {
5905: mat->spd = PETSC_BOOL3_TRUE;
5906: mat->symmetric = PETSC_BOOL3_TRUE;
5907: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5908: } else {
5909: mat->spd = PETSC_BOOL3_FALSE;
5910: }
5911: break;
5912: case MAT_SYMMETRIC:
5913: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5914: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5915: #if !defined(PETSC_USE_COMPLEX)
5916: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5917: #endif
5918: break;
5919: case MAT_HERMITIAN:
5920: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5921: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5922: #if !defined(PETSC_USE_COMPLEX)
5923: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5924: #endif
5925: break;
5926: case MAT_STRUCTURALLY_SYMMETRIC:
5927: mat->structurally_symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5928: break;
5929: case MAT_SYMMETRY_ETERNAL:
5930: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
5931: mat->symmetry_eternal = flg;
5932: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
5933: break;
5934: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
5935: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURAL_SYMMETRIC to true or false");
5936: mat->structural_symmetry_eternal = flg;
5937: break;
5938: case MAT_SPD_ETERNAL:
5939: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
5940: mat->spd_eternal = flg;
5941: if (flg) {
5942: mat->structural_symmetry_eternal = PETSC_TRUE;
5943: mat->symmetry_eternal = PETSC_TRUE;
5944: }
5945: break;
5946: case MAT_STRUCTURE_ONLY:
5947: mat->structure_only = flg;
5948: break;
5949: case MAT_SORTED_FULL:
5950: mat->sortedfull = flg;
5951: break;
5952: default:
5953: break;
5954: }
5955: PetscTryTypeMethod(mat, setoption, op, flg);
5956: PetscFunctionReturn(PETSC_SUCCESS);
5957: }
5959: /*@
5960: MatGetOption - Gets a parameter option that has been set for a matrix.
5962: Logically Collective
5964: Input Parameters:
5965: + mat - the matrix
5966: - option - the option; this routine only responds to certain options, check the source code for which ones
5968: Output Parameter:
5969: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5971: Level: intermediate
5973: Notes:
5974: Can only be called after `MatSetSizes()` and `MatSetType()` have been called.
5976: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
5977: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
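  A minimal usage sketch (assuming `mat` has had its type and sizes set elsewhere):
.vb
  PetscBool flg;
  PetscCall(MatGetOption(mat, MAT_SYMMETRY_ETERNAL, &flg));
.ve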
5979: .seealso: [](chapter_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
5980: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
5981: @*/
5982: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
5983: {
5984: PetscFunctionBegin;
5988: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Option %d is out of range", (int)op);
5989: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
5991: switch (op) {
5992: case MAT_NO_OFF_PROC_ENTRIES:
5993: *flg = mat->nooffprocentries;
5994: break;
5995: case MAT_NO_OFF_PROC_ZERO_ROWS:
5996: *flg = mat->nooffproczerorows;
5997: break;
5998: case MAT_SYMMETRIC:
5999: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6000: break;
6001: case MAT_HERMITIAN:
6002: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6003: break;
6004: case MAT_STRUCTURALLY_SYMMETRIC:
6005: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6006: break;
6007: case MAT_SPD:
6008: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6009: break;
6010: case MAT_SYMMETRY_ETERNAL:
6011: *flg = mat->symmetry_eternal;
6012: break;
6013: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6014: *flg = mat->structural_symmetry_eternal;
6015: break;
6016: default:
6017: break;
6018: }
6019: PetscFunctionReturn(PETSC_SUCCESS);
6020: }
6022: /*@
6023: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6024: this routine retains the old nonzero structure.
6026: Logically Collective
6028: Input Parameter:
6029: . mat - the matrix
6031: Level: intermediate
6033: Note:
6034: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6035: See the Performance chapter of the users manual for information on preallocating matrices.
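  A minimal usage sketch, typical when reusing the same nonzero pattern for a new set of values (assuming `mat` is an assembled matrix created elsewhere):
.vb
  PetscCall(MatZeroEntries(mat));  // keep the nonzero structure, set all stored values to 0.0
.ve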
6037: .seealso: [](chapter_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6038: @*/
6039: PetscErrorCode MatZeroEntries(Mat mat)
6040: {
6041: PetscFunctionBegin;
6044: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6045: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6046: MatCheckPreallocated(mat, 1);
6048: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6049: PetscUseTypeMethod(mat, zeroentries);
6050: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6051: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6052: PetscFunctionReturn(PETSC_SUCCESS);
6053: }
6055: /*@
6056: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6057: of a set of rows and columns of a matrix.
6059: Collective
6061: Input Parameters:
6062: + mat - the matrix
6063: . numRows - the number of rows/columns to zero
6064: . rows - the global row indices
6065: . diag - value put in the diagonal of the eliminated rows
6066: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6067: - b - optional vector of the right hand side, that will be adjusted by provided solution entries
6069: Level: intermediate
6071: Notes:
6072: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6074: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6075: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated.
6077: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6078: Krylov method to take advantage of the known solution on the zeroed rows.
6080: For the parallel case, all processes that share the matrix (i.e.,
6081: those in the communicator used for matrix creation) MUST call this
6082: routine, regardless of whether any rows being zeroed are owned by
6083: them.
6085: Unlike `MatZeroRows()` this does not change the nonzero structure of the matrix, it merely zeros those entries in the matrix.
6087: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6088: list only rows local to itself).
6090: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
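  A minimal sketch of eliminating Dirichlet boundary conditions (assuming `mat`, the solution vector `x` holding the known boundary values, the right-hand side `b`, and the `nbc` global row indices in `bcrows` are set up elsewhere):
.vb
  PetscCall(MatZeroRowsColumns(mat, nbc, bcrows, 1.0, x, b));  // eliminate the rows/columns, leave 1.0 on their diagonals
.ve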
6092: .seealso: [](chapter_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6093: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6094: @*/
6095: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6096: {
6097: PetscFunctionBegin;
6101: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6102: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6103: MatCheckPreallocated(mat, 1);
6105: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6106: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6107: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6108: PetscFunctionReturn(PETSC_SUCCESS);
6109: }
6111: /*@
6112: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6113: of a set of rows and columns of a matrix.
6115: Collective
6117: Input Parameters:
6118: + mat - the matrix
6119: . is - the rows to zero
6120: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6121: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6122: - b - optional vector of right hand side, that will be adjusted by provided solution
6124: Level: intermediate
6126: Note:
6127: See `MatZeroRowsColumns()` for details on how this routine operates.
6129: .seealso: [](chapter_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6130: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6131: @*/
6132: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6133: {
6134: PetscInt numRows;
6135: const PetscInt *rows;
6137: PetscFunctionBegin;
6142: PetscCall(ISGetLocalSize(is, &numRows));
6143: PetscCall(ISGetIndices(is, &rows));
6144: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6145: PetscCall(ISRestoreIndices(is, &rows));
6146: PetscFunctionReturn(PETSC_SUCCESS);
6147: }
6149: /*@
6150: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6151: of a set of rows of a matrix.
6153: Collective
6155: Input Parameters:
6156: + mat - the matrix
6157: . numRows - the number of rows to zero
6158: . rows - the global row indices
6159: . diag - value put in the diagonal of the zeroed rows
6160: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6161: - b - optional vector of right hand side, that will be adjusted by provided solution entries
6163: Level: intermediate
6165: Notes:
6166: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6168: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6170: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6171: Krylov method to take advantage of the known solution on the zeroed rows.
6173: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns
6174: from the matrix).
6176: Unlike `MatZeroRowsColumns()`, for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure from the eliminated rows of the matrix
6177: but does not release memory. Because of this removal, matrix-vector products with the adjusted matrix will be a bit faster. For the dense and block diagonal
6178: formats this does not alter the nonzero structure.
6180: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) has been set, the nonzero structure
6181: of the matrix is not changed; the values are
6182: merely zeroed.
6184: The user can set a value in the diagonal entry (or for the `MATAIJ`
6185: format can optionally remove the main diagonal entry from the
6186: nonzero structure as well, by passing 0.0 as the `diag` value).
6188: For the parallel case, all processes that share the matrix (i.e.,
6189: those in the communicator used for matrix creation) MUST call this
6190: routine, regardless of whether any rows being zeroed are owned by
6191: them.
6193: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6194: list only rows local to itself).
6196: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6197: owns that are to be zeroed. This saves a global synchronization in the implementation.
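  A minimal usage sketch (assuming `mat` is an assembled matrix created elsewhere; the row indices are illustrative only):
.vb
  PetscInt rows[] = {0, 5};
  PetscCall(MatZeroRows(mat, 2, rows, 1.0, NULL, NULL));  // zero rows 0 and 5, place 1.0 on their diagonals
.ve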
6199: .seealso: [](chapter_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6200: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`
6201: @*/
6202: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6203: {
6204: PetscFunctionBegin;
6208: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6209: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6210: MatCheckPreallocated(mat, 1);
6212: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6213: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6214: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6215: PetscFunctionReturn(PETSC_SUCCESS);
6216: }
6218: /*@
6219: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6220: of a set of rows of a matrix.
6222: Collective
6224: Input Parameters:
6225: + mat - the matrix
6226: . is - index set of rows to remove (if `NULL` then no row is removed)
6227: . diag - value put in all diagonals of eliminated rows
6228: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6229: - b - optional vector of right hand side, that will be adjusted by provided solution
6231: Level: intermediate
6233: Note:
6234: See `MatZeroRows()` for details on how this routine operates.
6236: .seealso: [](chapter_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6237: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6238: @*/
6239: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6240: {
6241: PetscInt numRows = 0;
6242: const PetscInt *rows = NULL;
6244: PetscFunctionBegin;
6247: if (is) {
6249: PetscCall(ISGetLocalSize(is, &numRows));
6250: PetscCall(ISGetIndices(is, &rows));
6251: }
6252: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6253: if (is) PetscCall(ISRestoreIndices(is, &rows));
6254: PetscFunctionReturn(PETSC_SUCCESS);
6255: }
6257: /*@
6258: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6259: of a set of rows of a matrix. These rows must be local to the process.
6261: Collective
6263: Input Parameters:
6264: + mat - the matrix
6265: . numRows - the number of rows to remove
6266: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6267: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6268: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6269: - b - optional vector of right hand side, that will be adjusted by provided solution
6271: Level: intermediate
6273: Notes:
6274: See `MatZeroRows()` for details on how this routine operates.
6276: The grid coordinates are across the entire grid, not just the local portion
6278: For periodic boundary conditions, use negative indices for values to the left of index 0 (these are
6279: obtained by wrapping values from the right edge), and use indices one (or more) past the last entry for values to the right of it
6280: (these are obtained by wrapping values from the left edge). This does not work for anything but the
6281: `DM_BOUNDARY_PERIODIC` boundary type.
6283: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6284: a single value per point) you can skip filling those indices.
6286: Fortran Note:
6287: `idxm` and `idxn` should be declared as
6288: $ MatStencil idxm(4,m)
6289: and the values inserted using
6290: .vb
6291: idxm(MatStencil_i,1) = i
6292: idxm(MatStencil_j,1) = j
6293: idxm(MatStencil_k,1) = k
6294: idxm(MatStencil_c,1) = c
6295: etc
6296: .ve
6298: .seealso: [](chapter_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6299: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6300: @*/
6301: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6302: {
6303: PetscInt dim = mat->stencil.dim;
6304: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6305: PetscInt *dims = mat->stencil.dims + 1;
6306: PetscInt *starts = mat->stencil.starts;
6307: PetscInt *dxm = (PetscInt *)rows;
6308: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6310: PetscFunctionBegin;
6315: PetscCall(PetscMalloc1(numRows, &jdxm));
6316: for (i = 0; i < numRows; ++i) {
6317: /* Skip unused dimensions (they are ordered k, j, i, c) */
6318: for (j = 0; j < 3 - sdim; ++j) dxm++;
6319: /* Local index in X dir */
6320: tmp = *dxm++ - starts[0];
6321: /* Loop over remaining dimensions */
6322: for (j = 0; j < dim - 1; ++j) {
6323: /* If nonlocal, set index to be negative */
6324: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6325: /* Update local index */
6326: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6327: }
6328: /* Skip component slot if necessary */
6329: if (mat->stencil.noc) dxm++;
6330: /* Local row number */
6331: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6332: }
6333: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6334: PetscCall(PetscFree(jdxm));
6335: PetscFunctionReturn(PETSC_SUCCESS);
6336: }
6338: /*@
6339: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6340: of a set of rows and columns of a matrix.
6342: Collective
6344: Input Parameters:
6345: + mat - the matrix
6346: . numRows - the number of rows/columns to remove
6347: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6348: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6349: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6350: - b - optional vector of right hand side, that will be adjusted by provided solution
6352: Level: intermediate
6354: Notes:
6355: See `MatZeroRowsColumns()` for details on how this routine operates.
6357: The grid coordinates are across the entire grid, not just the local portion
6359: For periodic boundary conditions, use negative indices for values to the left of index 0 (these are
6360: obtained by wrapping values from the right edge), and use indices one (or more) past the last entry for values to the right of it
6361: (these are obtained by wrapping values from the left edge). This does not work for anything but the
6362: `DM_BOUNDARY_PERIODIC` boundary type.
6364: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6365: a single value per point) you can skip filling those indices.
6367: Fortran Note:
6368: `idxm` and `idxn` should be declared as
6369: $ MatStencil idxm(4,m)
6370: and the values inserted using
6371: .vb
6372: idxm(MatStencil_i,1) = i
6373: idxm(MatStencil_j,1) = j
6374: idxm(MatStencil_k,1) = k
6375: idxm(MatStencil_c,1) = c
6376: etc
6377: .ve
6379: .seealso: [](chapter_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6380: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6381: @*/
6382: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6383: {
6384: PetscInt dim = mat->stencil.dim;
6385: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6386: PetscInt *dims = mat->stencil.dims + 1;
6387: PetscInt *starts = mat->stencil.starts;
6388: PetscInt *dxm = (PetscInt *)rows;
6389: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6391: PetscFunctionBegin;
6396: PetscCall(PetscMalloc1(numRows, &jdxm));
6397: for (i = 0; i < numRows; ++i) {
6398: /* Skip unused dimensions (they are ordered k, j, i, c) */
6399: for (j = 0; j < 3 - sdim; ++j) dxm++;
6400: /* Local index in X dir */
6401: tmp = *dxm++ - starts[0];
6402: /* Loop over remaining dimensions */
6403: for (j = 0; j < dim - 1; ++j) {
6404: /* If nonlocal, set index to be negative */
6405: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6406: /* Update local index */
6407: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6408: }
6409: /* Skip component slot if necessary */
6410: if (mat->stencil.noc) dxm++;
6411: /* Local row number */
6412: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6413: }
6414: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6415: PetscCall(PetscFree(jdxm));
6416: PetscFunctionReturn(PETSC_SUCCESS);
6417: }
6419: /*@C
6420: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6421: of a set of rows of a matrix; using local numbering of rows.
6423: Collective
6425: Input Parameters:
6426: + mat - the matrix
6427: . numRows - the number of rows to remove
6428: . rows - the local row indices
6429: . diag - value put in all diagonals of eliminated rows
6430: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6431: - b - optional vector of right hand side, that will be adjusted by provided solution
6433: Level: intermediate
6435: Notes:
6436: Before calling `MatZeroRowsLocal()`, the user must first set the
6437: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6439: See `MatZeroRows()` for details on how this routine operates.
6441: .seealso: [](chapter_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6442: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6443: @*/
6444: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6445: {
6446: PetscFunctionBegin;
6450: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6451: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6452: MatCheckPreallocated(mat, 1);
6454: if (mat->ops->zerorowslocal) {
6455: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6456: } else {
6457: IS is, newis;
6458: const PetscInt *newRows;
6460: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6461: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6462: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6463: PetscCall(ISGetIndices(newis, &newRows));
6464: PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6465: PetscCall(ISRestoreIndices(newis, &newRows));
6466: PetscCall(ISDestroy(&newis));
6467: PetscCall(ISDestroy(&is));
6468: }
6469: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6470: PetscFunctionReturn(PETSC_SUCCESS);
6471: }
6473: /*@
6474: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6475: of a set of rows of a matrix; using local numbering of rows.
6477: Collective
6479: Input Parameters:
6480: + mat - the matrix
6481: . is - index set of rows to remove
6482: . diag - value put in all diagonals of eliminated rows
6483: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6484: - b - optional vector of right hand side, that will be adjusted by provided solution
6486: Level: intermediate
6488: Notes:
6489: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6490: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6492: See `MatZeroRows()` for details on how this routine operates.
6494: .seealso: [](chapter_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6495: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6496: @*/
6497: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6498: {
6499: PetscInt numRows;
6500: const PetscInt *rows;
6502: PetscFunctionBegin;
6506: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6507: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6508: MatCheckPreallocated(mat, 1);
6510: PetscCall(ISGetLocalSize(is, &numRows));
6511: PetscCall(ISGetIndices(is, &rows));
6512: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6513: PetscCall(ISRestoreIndices(is, &rows));
6514: PetscFunctionReturn(PETSC_SUCCESS);
6515: }
6517: /*@
6518: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6519: of a set of rows and columns of a matrix; using local numbering of rows.
6521: Collective
6523: Input Parameters:
6524: + mat - the matrix
6525: . numRows - the number of rows to remove
6526: . rows - the local row indices
6527: . diag - value put in all diagonals of eliminated rows
6528: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6529: - b - optional vector of right hand side, that will be adjusted by provided solution
6531: Level: intermediate
6533: Notes:
6534: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6535: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6537: See `MatZeroRowsColumns()` for details on how this routine operates.
6539: .seealso: [](chapter_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6540: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6541: @*/
6542: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6543: {
6544: IS is, newis;
6545: const PetscInt *newRows;
6547: PetscFunctionBegin;
6551: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6552: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6553: MatCheckPreallocated(mat, 1);
6555: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6556: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6557: PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6558: PetscCall(ISGetIndices(newis, &newRows));
6559: PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6560: PetscCall(ISRestoreIndices(newis, &newRows));
6561: PetscCall(ISDestroy(&newis));
6562: PetscCall(ISDestroy(&is));
6563: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6564: PetscFunctionReturn(PETSC_SUCCESS);
6565: }
6567: /*@
6568: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6569: of a set of rows and columns of a matrix; using local numbering of rows.
6571: Collective
6573: Input Parameters:
6574: + mat - the matrix
6575: . is - index set of rows to remove
6576: . diag - value put in all diagonals of eliminated rows
6577: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6578: - b - optional vector of right hand side, that will be adjusted by provided solution
6580: Level: intermediate
6582: Notes:
6583: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6584: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6586: See `MatZeroRowsColumns()` for details on how this routine operates.
6588: .seealso: [](chapter_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6589: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6590: @*/
6591: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6592: {
6593: PetscInt numRows;
6594: const PetscInt *rows;
6596: PetscFunctionBegin;
6600: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6601: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6602: MatCheckPreallocated(mat, 1);
6604: PetscCall(ISGetLocalSize(is, &numRows));
6605: PetscCall(ISGetIndices(is, &rows));
6606: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6607: PetscCall(ISRestoreIndices(is, &rows));
6608: PetscFunctionReturn(PETSC_SUCCESS);
6609: }
6611: /*@C
6612: MatGetSize - Returns the numbers of rows and columns in a matrix.
6614: Not Collective
6616: Input Parameter:
6617: . mat - the matrix
6619: Output Parameters:
6620: + m - the number of global rows
6621: - n - the number of global columns
6623: Level: beginner
6625: Note:
6626: Both output parameters can be `NULL` on input.
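  A minimal usage sketch (assuming `mat` has had its sizes set elsewhere):
.vb
  PetscInt M, N, m, n;
  PetscCall(MatGetSize(mat, &M, &N));       // global number of rows and columns
  PetscCall(MatGetLocalSize(mat, &m, &n));  // local sizes of the left and right vectors
.ve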
6628: .seealso: [](chapter_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6629: @*/
6630: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6631: {
6632: PetscFunctionBegin;
6634: if (m) *m = mat->rmap->N;
6635: if (n) *n = mat->cmap->N;
6636: PetscFunctionReturn(PETSC_SUCCESS);
6637: }
6639: /*@C
6640: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6641: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6643: Not Collective
6645: Input Parameter:
6646: . mat - the matrix
6648: Output Parameters:
6649: + m - the number of local rows, use `NULL` to not obtain this value
6650: - n - the number of local columns, use `NULL` to not obtain this value
6652: Level: beginner
6654: .seealso: [](chapter_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6655: @*/
6656: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6657: {
6658: PetscFunctionBegin;
6662: if (m) *m = mat->rmap->n;
6663: if (n) *n = mat->cmap->n;
6664: PetscFunctionReturn(PETSC_SUCCESS);
6665: }
6667: /*@C
6668: MatGetOwnershipRangeColumn - Returns the range of matrix columns owned by this process, that is, the columns associated with the locally owned rows of a vector one
6669: multiplies this matrix by. (The columns of the "diagonal block" for most sparse matrix formats). See :any:`<sec_matlayout>` for details on matrix layouts.
6671: Not Collective, unless matrix has not been allocated, then collective
6673: Input Parameter:
6674: . mat - the matrix
6676: Output Parameters:
6677: + m - the global index of the first local column, use `NULL` to not obtain this value
6678: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6680: Level: developer
6682: .seealso: [](chapter_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
6683: @*/
6684: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6685: {
6686: PetscFunctionBegin;
6691: MatCheckPreallocated(mat, 1);
6692: if (m) *m = mat->cmap->rstart;
6693: if (n) *n = mat->cmap->rend;
6694: PetscFunctionReturn(PETSC_SUCCESS);
6695: }
6697: /*@C
6698: MatGetOwnershipRange - For matrices that own values by row, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6699: this MPI rank. For all matrices it returns the range of matrix rows associated with rows of a vector that would contain the result of a matrix
6700: vector product with this matrix. See :any:`<sec_matlayout>` for details on matrix layouts.
6702: Not Collective
6704: Input Parameter:
6705: . mat - the matrix
6707: Output Parameters:
6708: + m - the global index of the first local row, use `NULL` to not obtain this value
6709: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6711: Level: beginner
6713: Note:
6714: This function requires that the matrix be preallocated. If you have not preallocated, consider using
6715: `PetscSplitOwnership`(`MPI_Comm` comm, `PetscInt` *n, `PetscInt` *N)
6716: and then `MPI_Scan()` to calculate prefix sums of the local sizes.
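  A minimal sketch of setting the locally owned diagonal entries (assuming `mat` has been created and preallocated elsewhere):
.vb
  PetscInt    rstart, rend, i;
  PetscScalar one = 1.0;
  PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValues(mat, 1, &i, 1, &i, &one, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve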
6718: .seealso: [](chapter_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`,
6719: `PetscLayout`
6720: @*/
6721: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6722: {
6723: PetscFunctionBegin;
6728: MatCheckPreallocated(mat, 1);
6729: if (m) *m = mat->rmap->rstart;
6730: if (n) *n = mat->rmap->rend;
6731: PetscFunctionReturn(PETSC_SUCCESS);
6732: }
6734: /*@C
6735: MatGetOwnershipRanges - For matrices that own values by row, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6736: each process. For all matrices it returns the ranges of matrix rows associated with rows of a vector that would contain the result of a matrix
6737: vector product with this matrix. See :any:`<sec_matlayout>` for details on matrix layouts.
6739: Not Collective, unless matrix has not been allocated
6741: Input Parameter:
6742: . mat - the matrix
6744: Output Parameter:
6745: . ranges - start of each process's portion, with one additional final entry equal to the total number of rows
6747: Level: beginner
6749: .seealso: [](chapter_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
6750: @*/
6751: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt **ranges)
6752: {
6753: PetscFunctionBegin;
6756: MatCheckPreallocated(mat, 1);
6757: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6758: PetscFunctionReturn(PETSC_SUCCESS);
6759: }
6761: /*@C
6762: MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with rows of a vector one multiplies this matrix by that are owned by
6763: each processor. (The columns of the "diagonal blocks", for most sparse matrix formats). See :any:`<sec_matlayout>` for details on matrix layouts.
6765: Not Collective, unless matrix has not been allocated
6767: Input Parameter:
6768: . mat - the matrix
6770: Output Parameter:
6771: . ranges - start of each process's portion, with one additional final entry equal to the total number of columns
6773: Level: beginner
6775: .seealso: [](chapter_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`
6776: @*/
6777: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt **ranges)
6778: {
6779: PetscFunctionBegin;
6782: MatCheckPreallocated(mat, 1);
6783: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
6784: PetscFunctionReturn(PETSC_SUCCESS);
6785: }
6787: /*@C
6788: MatGetOwnershipIS - Get row and column ownership of a matrix's values as index sets. For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this
6789: corresponds to values returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and `MATSCALAPACK` the ownership
6790: is more complicated. See :any:`<sec_matlayout>` for details on matrix layouts.
6792: Not Collective
6794: Input Parameter:
6795: . A - matrix
6797: Output Parameters:
6798: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
6799: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
6801: Level: intermediate
6803: .seealso: [](chapter_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
6804: @*/
6805: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
6806: {
6807: PetscErrorCode (*f)(Mat, IS *, IS *);
6809: PetscFunctionBegin;
6810: MatCheckPreallocated(A, 1);
6811: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
6812: if (f) {
6813: PetscCall((*f)(A, rows, cols));
6814: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
6815: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
6816: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
6817: }
6818: PetscFunctionReturn(PETSC_SUCCESS);
6819: }
6821: /*@C
6822: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`
6823: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
6824: to complete the factorization.
6826: Collective
6828: Input Parameters:
6829: + fact - the factorized matrix obtained with `MatGetFactor()`
6830: . mat - the matrix
6831: . row - row permutation
6832: . col - column permutation
6833: - info - structure containing
6834: .vb
6835: levels - number of levels of fill.
6836: expected fill - as ratio of original fill.
6837: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
6838: missing diagonal entries)
6839: .ve
6841: Level: developer
6843: Notes:
6844: See [Matrix Factorization](sec_matfactor) for additional information.
6846: Most users should employ the `KSP` interface for linear solvers
6847: instead of working directly with matrix algebra routines such as this.
6848: See, e.g., `KSPCreate()`.
6850: Uses the definition of level of fill as in Y. Saad, 2003
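  A minimal sketch of a complete ILU(k) solve path (assuming `mat` is an assembled `MATSEQAIJ` matrix and `b`, `x` are compatible vectors created elsewhere; most users should use `KSP` with a `PC` instead):
.vb
  Mat           fact;
  IS            row, col;
  MatFactorInfo info;
  PetscCall(MatGetOrdering(mat, MATORDERINGNATURAL, &row, &col));
  PetscCall(MatGetFactor(mat, MATSOLVERPETSC, MAT_FACTOR_ILU, &fact));
  PetscCall(MatFactorInfoInitialize(&info));
  info.levels = 1;   // ILU(1)
  info.fill   = 1.0; // expected fill as a ratio of the original fill
  PetscCall(MatILUFactorSymbolic(fact, mat, row, col, &info));
  PetscCall(MatLUFactorNumeric(fact, mat, &info));
  PetscCall(MatSolve(fact, b, x));
  PetscCall(ISDestroy(&row));
  PetscCall(ISDestroy(&col));
  PetscCall(MatDestroy(&fact));
.ve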
6852: Developer Note:
6853: The Fortran interface is not autogenerated as the
6854: interface definition cannot be generated correctly [due to `MatFactorInfo`]
6856: References:
6857: . * - Y. Saad, Iterative methods for sparse linear systems Philadelphia: Society for Industrial and Applied Mathematics, 2003
6859: .seealso: [](chapter_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
6860: `MatGetOrdering()`, `MatFactorInfo`
6861: @*/
6862: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
6863: {
6864: PetscFunctionBegin;
6871: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
6872: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill %g is less than 1.0", (double)info->fill);
6873: if (!fact->ops->ilufactorsymbolic) {
6874: MatSolverType stype;
6875: PetscCall(MatFactorGetSolverType(fact, &stype));
6876: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s symbolic ILU using solver type %s", ((PetscObject)mat)->type_name, stype);
6877: }
6878: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6879: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6880: MatCheckPreallocated(mat, 2);
6882: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
6883: PetscCall((fact->ops->ilufactorsymbolic)(fact, mat, row, col, info));
6884: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
6885: PetscFunctionReturn(PETSC_SUCCESS);
6886: }
6888: /*@C
6889: MatICCFactorSymbolic - Performs symbolic incomplete
6890: Cholesky factorization for a symmetric matrix. Use
6891: `MatCholeskyFactorNumeric()` to complete the factorization.
6893: Collective
6895: Input Parameters:
6896: + fact - the factorized matrix obtained with `MatGetFactor()`
6897: . mat - the matrix to be factored
6898: . perm - row and column permutation
6899: - info - structure containing
6900: .vb
6901: levels - number of levels of fill.
6902: expected fill - as ratio of original fill.
6903: .ve
6905: Level: developer
6907: Notes:
6908: Most users should employ the `KSP` interface for linear solvers
6909: instead of working directly with matrix algebra routines such as this.
6910: See, e.g., `KSPCreate()`.
6912: This uses the definition of level of fill as in Y. Saad, 2003
6914: Developer Note:
6915: The Fortran interface is not autogenerated as the
6916: interface definition cannot be generated correctly [due to `MatFactorInfo`]
6918: References:
6919: . * - Y. Saad, Iterative methods for sparse linear systems Philadelphia: Society for Industrial and Applied Mathematics, 2003
6921: .seealso: [](chapter_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
6922: @*/
6923: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
6924: {
6925: PetscFunctionBegin;
6931: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6932: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
6933: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
6934: if (!(fact)->ops->iccfactorsymbolic) {
6935: MatSolverType stype;
6936: PetscCall(MatFactorGetSolverType(fact, &stype));
6937: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s symbolic ICC using solver type %s", ((PetscObject)mat)->type_name, stype);
6938: }
6939: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6940: MatCheckPreallocated(mat, 2);
6942: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
6943: PetscCall((fact->ops->iccfactorsymbolic)(fact, mat, perm, info));
6944: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
6945: PetscFunctionReturn(PETSC_SUCCESS);
6946: }
6948: /*@C
6949: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
6950: points to an array of valid matrices, they may be reused to store the new
6951: submatrices.
6953: Collective
6955: Input Parameters:
6956: + mat - the matrix
6957: . n - the number of submatrices to be extracted (on this processor, may be zero)
6958: . irow - index set of rows to extract
6959: . icol - index set of columns to extract
6960: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
6962: Output Parameter:
6963: . submat - the array of submatrices
6965: Level: advanced
6967: Notes:
6968: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
6969: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
6970: to extract a parallel submatrix.
6972: Some matrix types place restrictions on the row and column
6973: indices, such as that they be sorted or that they be equal to each other.
6975: The index sets may not have duplicate entries.
6977: When extracting submatrices from a parallel matrix, each processor can
6978: form a different submatrix by setting the rows and columns of its
6979: individual index sets according to the local submatrix desired.
6981: When finished using the submatrices, the user should destroy
6982: them with `MatDestroySubMatrices()`.
6984: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
6985: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
6987: This routine creates the matrices in submat; you should NOT create them before
6988: calling it. It also allocates the array of matrix pointers submat.
6990: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
6991: request one row/column in a block, they must request all rows/columns that are in
6992: that block. For example, if the block size is 2 you cannot request just row 0 and
6993: column 0.
6995: Fortran Note:
6996: The Fortran interface is slightly different from that given below; it
6997: requires one to pass in as `submat` a `Mat` (integer) array of size at least n+1.
6999: .seealso: [](chapter_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7000: @*/
7001: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7002: {
7003: PetscInt i;
7004: PetscBool eq;
7006: PetscFunctionBegin;
7009: if (n) {
7014: }
7016: if (n && scall == MAT_REUSE_MATRIX) {
7019: }
7020: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7021: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7022: MatCheckPreallocated(mat, 1);
7023: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7024: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7025: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7026: for (i = 0; i < n; i++) {
7027: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7028: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7029: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7030: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7031: if (mat->boundtocpu && mat->bindingpropagates) {
7032: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7033: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7034: }
7035: #endif
7036: }
7037: PetscFunctionReturn(PETSC_SUCCESS);
7038: }
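/*
   Illustrative usage sketch: extracting two sequential submatrices from a (possibly parallel)
   matrix A; the particular index ranges are assumptions chosen only for illustration.

     IS  irow[2], icol[2];
     Mat *submat;

     PetscCall(ISCreateStride(PETSC_COMM_SELF, 4, 0, 1, &irow[0]));  // rows 0..3
     PetscCall(ISCreateStride(PETSC_COMM_SELF, 4, 0, 1, &icol[0]));  // cols 0..3
     PetscCall(ISCreateStride(PETSC_COMM_SELF, 4, 4, 1, &irow[1]));  // rows 4..7
     PetscCall(ISCreateStride(PETSC_COMM_SELF, 4, 4, 1, &icol[1]));  // cols 4..7
     PetscCall(MatCreateSubMatrices(A, 2, irow, icol, MAT_INITIAL_MATRIX, &submat));
     // ... use submat[0] and submat[1], which are sequential matrices ...
     PetscCall(MatDestroySubMatrices(2, &submat));
     for (PetscInt i = 0; i < 2; i++) {
       PetscCall(ISDestroy(&irow[i]));
       PetscCall(ISDestroy(&icol[i]));
     }
*/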
7040: /*@C
7041: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of mat (by pairs of `IS` that may live on subcomms).
7043: Collective
7045: Input Parameters:
7046: + mat - the matrix
7047: . n - the number of submatrices to be extracted
7048: . irow - index set of rows to extract
7049: . icol - index set of columns to extract
7050: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7052: Output Parameter:
7053: . submat - the array of submatrices
7055: Level: advanced
7057: Note:
7058: This is used by `PCGASM`
7060: .seealso: [](chapter_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7061: @*/
7062: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7063: {
7064: PetscInt i;
7065: PetscBool eq;
7067: PetscFunctionBegin;
7070: if (n) {
7075: }
7077: if (n && scall == MAT_REUSE_MATRIX) {
7080: }
7081: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7082: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7083: MatCheckPreallocated(mat, 1);
7085: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7086: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7087: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7088: for (i = 0; i < n; i++) {
7089: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7090: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7091: }
7092: PetscFunctionReturn(PETSC_SUCCESS);
7093: }
7095: /*@C
7096: MatDestroyMatrices - Destroys an array of matrices.
7098: Collective
7100: Input Parameters:
7101: + n - the number of local matrices
7102: - mat - the matrices (this is a pointer to the array of matrices)
7104: Level: advanced
7106: Note:
7107: Frees not only the matrices, but also the array that contains the matrices
7109: Fortran Note:
7110: This does not free the array.
7112: .seealso: [](chapter_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7113: @*/
7114: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7115: {
7116: PetscInt i;
7118: PetscFunctionBegin;
7119: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7120: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7123: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7125: /* memory is allocated even if n = 0 */
7126: PetscCall(PetscFree(*mat));
7127: PetscFunctionReturn(PETSC_SUCCESS);
7128: }
7130: /*@C
7131: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7133: Collective
7135: Input Parameters:
7136: + n - the number of local matrices
7137: - mat - the matrices (this is a pointer to the array of matrices, just to match the calling
7138: sequence of `MatCreateSubMatrices()`)
7140: Level: advanced
7142: Note:
7143: Frees not only the matrices, but also the array that contains the matrices
7145: Fortran Note:
7146: This does not free the array.
7148: .seealso: [](chapter_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7149: @*/
7150: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7151: {
7152: Mat mat0;
7154: PetscFunctionBegin;
7155: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7156: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7157: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7160: mat0 = (*mat)[0];
7161: if (mat0 && mat0->ops->destroysubmatrices) {
7162: PetscCall((mat0->ops->destroysubmatrices)(n, mat));
7163: } else {
7164: PetscCall(MatDestroyMatrices(n, mat));
7165: }
7166: PetscFunctionReturn(PETSC_SUCCESS);
7167: }
7169: /*@C
7170: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7172: Collective
7174: Input Parameter:
7175: . mat - the matrix
7177: Output Parameter:
7178: . matstruct - the sequential matrix with the nonzero structure of mat
7180: Level: developer
7182: .seealso: [](chapter_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7183: @*/
7184: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7185: {
7186: PetscFunctionBegin;
7191: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7192: MatCheckPreallocated(mat, 1);
7194: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7195: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7196: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7197: PetscFunctionReturn(PETSC_SUCCESS);
7198: }
7200: /*@C
7201: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7203: Collective
7205: Input Parameter:
7206: . mat - the matrix (this is a pointer to the matrix, just to match the calling
7207: sequence of `MatGetSeqNonzeroStructure()`)
7209: Level: advanced
7211: Note:
7212: Frees the matrix obtained with `MatGetSeqNonzeroStructure()`
7214: .seealso: [](chapter_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7215: @*/
7216: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7217: {
7218: PetscFunctionBegin;
7220: PetscCall(MatDestroy(mat));
7221: PetscFunctionReturn(PETSC_SUCCESS);
7222: }
7224: /*@
7225: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7226: replaces the index sets by larger ones that represent submatrices with
7227: additional overlap.
7229: Collective
7231: Input Parameters:
7232: + mat - the matrix
7233: . n - the number of index sets
7234: . is - the array of index sets (these index sets will be changed during the call)
7235: - ov - the additional overlap requested
7237: Options Database Key:
7238: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7240: Level: developer
7242: Note:
7243: The computed overlap preserves the matrix block sizes when the blocks are square.
7244: That is, if a nonzero in a given block would increase the overlap, then all columns associated with
7245: that block are included in the overlap, regardless of whether each specific column would increase the overlap.
7247: .seealso: [](chapter_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7248: @*/
7249: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7250: {
7251: PetscInt i, bs, cbs;
7253: PetscFunctionBegin;
7257: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7258: if (n) {
7261: }
7262: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7263: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7264: MatCheckPreallocated(mat, 1);
7266: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7267: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7268: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7269: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7270: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7271: if (bs == cbs) {
7272: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7273: }
7274: PetscFunctionReturn(PETSC_SUCCESS);
7275: }
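/*
   Illustrative usage sketch: growing a locally owned index set by one level of overlap and
   then extracting the corresponding sequential submatrix, roughly as PCASM does internally;
   A and the index set layout are assumptions.

     IS       is[1];
     Mat      *submat;
     PetscInt rstart, rend;

     PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
     PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &is[0]));
     PetscCall(MatIncreaseOverlap(A, 1, is, 1));  // add one level of overlap
     PetscCall(MatCreateSubMatrices(A, 1, is, is, MAT_INITIAL_MATRIX, &submat));
     // ... use submat[0] as the local overlapping block ...
     PetscCall(MatDestroySubMatrices(1, &submat));
     PetscCall(ISDestroy(&is[0]));
*/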
7277: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7279: /*@
7280: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7281: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7282: additional overlap.
7284: Collective
7286: Input Parameters:
7287: + mat - the matrix
7288: . n - the number of index sets
7289: . is - the array of index sets (these index sets will be changed during the call)
7290: - ov - the additional overlap requested
7292: Options Database Key:
7293: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7295: Level: developer
7297: .seealso: [](chapter_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7298: @*/
7299: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7300: {
7301: PetscInt i;
7303: PetscFunctionBegin;
7306: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7307: if (n) {
7310: }
7311: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7312: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7313: MatCheckPreallocated(mat, 1);
7314: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7315: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7316: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7317: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7318: PetscFunctionReturn(PETSC_SUCCESS);
7319: }
7321: /*@
7322: MatGetBlockSize - Returns the matrix block size.
7324: Not Collective
7326: Input Parameter:
7327: . mat - the matrix
7329: Output Parameter:
7330: . bs - block size
7332: Level: intermediate
7334: Notes:
7335: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7337: If the block size has not been set yet this routine returns 1.
7339: .seealso: [](chapter_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7340: @*/
7341: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7342: {
7343: PetscFunctionBegin;
7346: *bs = PetscAbs(mat->rmap->bs);
7347: PetscFunctionReturn(PETSC_SUCCESS);
7348: }
7350: /*@
7351: MatGetBlockSizes - Returns the matrix block row and column sizes.
7353: Not Collective
7355: Input Parameter:
7356: . mat - the matrix
7358: Output Parameters:
7359: + rbs - row block size
7360: - cbs - column block size
7362: Level: intermediate
7364: Notes:
7365: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7366: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7368: If a block size has not been set yet this routine returns 1.
7370: .seealso: [](chapter_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7371: @*/
7372: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7373: {
7374: PetscFunctionBegin;
7378: if (rbs) *rbs = PetscAbs(mat->rmap->bs);
7379: if (cbs) *cbs = PetscAbs(mat->cmap->bs);
7380: PetscFunctionReturn(PETSC_SUCCESS);
7381: }
7383: /*@
7384: MatSetBlockSize - Sets the matrix block size.
7386: Logically Collective
7388: Input Parameters:
7389: + mat - the matrix
7390: - bs - block size
7392: Level: intermediate
7394: Notes:
7395: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7396: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7398: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7399: is compatible with the matrix local sizes.
7401: .seealso: [](chapter_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7402: @*/
7403: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7404: {
7405: PetscFunctionBegin;
7408: PetscCall(MatSetBlockSizes(mat, bs, bs));
7409: PetscFunctionReturn(PETSC_SUCCESS);
7410: }
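/*
   Illustrative usage sketch: setting a block size of 3 on a sequential BAIJ matrix before
   preallocation, then querying it back; the matrix sizes are assumptions chosen for
   illustration.

     Mat      B;
     PetscInt bs;

     PetscCall(MatCreate(PETSC_COMM_SELF, &B));
     PetscCall(MatSetSizes(B, 30, 30, 30, 30));
     PetscCall(MatSetType(B, MATSEQBAIJ));
     PetscCall(MatSetBlockSize(B, 3));   // must precede MatSetUp()/preallocation
     PetscCall(MatSetUp(B));
     PetscCall(MatGetBlockSize(B, &bs)); // bs is now 3
     PetscCall(MatDestroy(&B));
*/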
7412: typedef struct {
7413: PetscInt n;
7414: IS *is;
7415: Mat *mat;
7416: PetscObjectState nonzerostate;
7417: Mat C;
7418: } EnvelopeData;
7420: static PetscErrorCode EnvelopeDataDestroy(EnvelopeData *edata)
7421: {
7422: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7423: PetscCall(PetscFree(edata->is));
7424: PetscCall(PetscFree(edata));
7425: return PETSC_SUCCESS;
7426: }
7428: /*
7429: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal, this computes and stores
7430: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7432: Collective
7434: Input Parameter:
7435: . mat - the matrix
7437: Notes:
7438: There can be zeros within the blocks
7440: The blocks can overlap between processes, including lying across more than two processes
7442: .seealso: [](chapter_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7443: */
7444: static PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7445: {
7446: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7447: PetscInt *diag, *odiag, sc;
7448: VecScatter scatter;
7449: PetscScalar *seqv;
7450: const PetscScalar *parv;
7451: const PetscInt *ia, *ja;
7452: PetscBool set, flag, done;
7453: Mat AA = mat, A;
7454: MPI_Comm comm;
7455: PetscMPIInt rank, size, tag;
7456: MPI_Status status;
7457: PetscContainer container;
7458: EnvelopeData *edata;
7459: Vec seq, par;
7460: IS isglobal;
7462: PetscFunctionBegin;
7464: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7465: if (!set || !flag) {
7466: /* TODO: only needs nonzero structure of transpose */
7467: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7468: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7469: }
7470: PetscCall(MatAIJGetLocalMat(AA, &A));
7471: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7472: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7474: PetscCall(MatGetLocalSize(mat, &n, NULL));
7475: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7476: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7477: PetscCallMPI(MPI_Comm_size(comm, &size));
7478: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7480: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7482: if (rank > 0) {
7483: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7484: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7485: }
7486: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7487: for (i = 0; i < n; i++) {
7488: env = PetscMax(env, ja[ia[i + 1] - 1]);
7489: II = rstart + i;
7490: if (env == II) {
7491: starts[lblocks] = tbs;
7492: sizes[lblocks++] = 1 + II - tbs;
7493: tbs = 1 + II;
7494: }
7495: }
7496: if (rank < size - 1) {
7497: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7498: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7499: }
7501: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7502: if (!set || !flag) PetscCall(MatDestroy(&AA));
7503: PetscCall(MatDestroy(&A));
7505: PetscCall(PetscNew(&edata));
7506: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7507: edata->n = lblocks;
7508: /* create IS needed for extracting blocks from the original matrix */
7509: PetscCall(PetscMalloc1(lblocks, &edata->is));
7510: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7512: /* Create the resulting inverse matrix structure with preallocation information */
7513: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7514: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7515: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7516: PetscCall(MatSetType(edata->C, MATAIJ));
7518: /* Communicate the start and end of each row, from each block to the correct rank */
7519: /* TODO: Use PetscSF instead of VecScatter */
7520: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7521: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7522: PetscCall(VecGetArrayWrite(seq, &seqv));
7523: for (PetscInt i = 0; i < lblocks; i++) {
7524: for (PetscInt j = 0; j < sizes[i]; j++) {
7525: seqv[cnt] = starts[i];
7526: seqv[cnt + 1] = starts[i] + sizes[i];
7527: cnt += 2;
7528: }
7529: }
7530: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7531: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7532: sc -= cnt;
7533: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7534: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7535: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7536: PetscCall(ISDestroy(&isglobal));
7537: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7538: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7539: PetscCall(VecScatterDestroy(&scatter));
7540: PetscCall(VecDestroy(&seq));
7541: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7542: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7543: PetscCall(VecGetArrayRead(par, &parv));
7544: cnt = 0;
7545: PetscCall(MatGetSize(mat, NULL, &n));
7546: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7547: PetscInt start, end, d = 0, od = 0;
7549: start = (PetscInt)PetscRealPart(parv[cnt]);
7550: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7551: cnt += 2;
7553: if (start < cstart) {
7554: od += cstart - start + n - cend;
7555: d += cend - cstart;
7556: } else if (start < cend) {
7557: od += n - cend;
7558: d += cend - start;
7559: } else od += n - start;
7560: if (end <= cstart) {
7561: od -= cstart - end + n - cend;
7562: d -= cend - cstart;
7563: } else if (end < cend) {
7564: od -= n - cend;
7565: d -= cend - end;
7566: } else od -= n - end;
7568: odiag[i] = od;
7569: diag[i] = d;
7570: }
7571: PetscCall(VecRestoreArrayRead(par, &parv));
7572: PetscCall(VecDestroy(&par));
7573: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7574: PetscCall(PetscFree2(diag, odiag));
7575: PetscCall(PetscFree2(sizes, starts));
7577: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7578: PetscCall(PetscContainerSetPointer(container, edata));
7579: PetscCall(PetscContainerSetUserDestroy(container, (PetscErrorCode(*)(void *))EnvelopeDataDestroy));
7580: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7581: PetscCall(PetscObjectDereference((PetscObject)container));
7582: PetscFunctionReturn(PETSC_SUCCESS);
7583: }
7585: /*@
7586: MatInvertVariableBlockEnvelope - Sets matrix `C` to be the inverted block diagonal of matrix `A`
7588: Collective
7590: Input Parameters:
7591: + A - the matrix
7592: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7594: Output Parameter:
7595: . C - matrix with inverted block diagonal of `A`
7597: Level: advanced
7599: Note:
7600: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7602: .seealso: [](chapter_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7603: @*/
7604: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7605: {
7606: PetscContainer container;
7607: EnvelopeData *edata;
7608: PetscObjectState nonzerostate;
7610: PetscFunctionBegin;
7611: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7612: if (!container) {
7613: PetscCall(MatComputeVariableBlockEnvelope(A));
7614: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7615: }
7616: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7617: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7618: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7619: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7621: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7622: *C = edata->C;
7624: for (PetscInt i = 0; i < edata->n; i++) {
7625: Mat D;
7626: PetscScalar *dvalues;
7628: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7629: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7630: PetscCall(MatSeqDenseInvert(D));
7631: PetscCall(MatDenseGetArray(D, &dvalues));
7632: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7633: PetscCall(MatDestroy(&D));
7634: }
7635: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7636: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7637: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7638: PetscFunctionReturn(PETSC_SUCCESS);
7639: }
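/*
   Illustrative usage sketch: inverting the variable block diagonal of A, then reusing the
   result after a numerical (but not structural) change to A; A is an assumption.

     Mat C = NULL;

     PetscCall(MatInvertVariableBlockEnvelope(A, MAT_INITIAL_MATRIX, &C));
     // ... the values of A change but its nonzero structure stays the same ...
     PetscCall(MatInvertVariableBlockEnvelope(A, MAT_REUSE_MATRIX, &C));
*/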
7641: /*@
7642: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7644: Logically Collective
7646: Input Parameters:
7647: + mat - the matrix
7648: . nblocks - the number of blocks on this process; each block can only exist on a single process
7649: - bsizes - the block sizes
7651: Level: intermediate
7653: Notes:
7654: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7656: Each variable point-block set of degrees of freedom must live on a single MPI rank. That is a point block cannot straddle two MPI ranks.
7658: .seealso: [](chapter_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7659: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7660: @*/
7661: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, PetscInt *bsizes)
7662: {
7663: PetscInt i, ncnt = 0, nlocal;
7665: PetscFunctionBegin;
7667: PetscCheck(nblocks >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks must be greater than or equal to zero");
7668: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7669: for (i = 0; i < nblocks; i++) ncnt += bsizes[i];
7670: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7671: PetscCall(PetscFree(mat->bsizes));
7672: mat->nblocks = nblocks;
7673: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7674: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7675: PetscFunctionReturn(PETSC_SUCCESS);
7676: }
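/*
   Illustrative usage sketch: declaring variable-sized diagonal point-blocks, for example for
   use with PCVPBJACOBI; the block sizes are assumptions and must sum to the local row count
   (assumed to be 6 here).

     PetscInt bsizes[3] = {2, 3, 1};  // sums to the local number of rows

     PetscCall(MatSetVariableBlockSizes(A, 3, bsizes));
*/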
7678: /*@C
7679: MatGetVariableBlockSizes - Gets the diagonal point-blocks of the matrix that need not be of the same size
7681: Logically Collective; No Fortran Support
7683: Input Parameter:
7684: . mat - the matrix
7686: Output Parameters:
7687: + nblocks - the number of blocks on this process
7688: - bsizes - the block sizes
7690: Level: intermediate
7692: .seealso: [](chapter_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7693: @*/
7694: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt **bsizes)
7695: {
7696: PetscFunctionBegin;
7698: *nblocks = mat->nblocks;
7699: *bsizes = mat->bsizes;
7700: PetscFunctionReturn(PETSC_SUCCESS);
7701: }
7703: /*@
7704: MatSetBlockSizes - Sets the matrix block row and column sizes.
7706: Logically Collective
7708: Input Parameters:
7709: + mat - the matrix
7710: . rbs - row block size
7711: - cbs - column block size
7713: Level: intermediate
7715: Notes:
7716: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7717: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7718: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7720: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
7721: are compatible with the matrix local sizes.
7723: The row and column block sizes determine the block size of the "row" and "column" vectors returned by `MatCreateVecs()`.
7725: .seealso: [](chapter_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7726: @*/
7727: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
7728: {
7729: PetscFunctionBegin;
7733: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
7734: if (mat->rmap->refcnt) {
7735: ISLocalToGlobalMapping l2g = NULL;
7736: PetscLayout nmap = NULL;
7738: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
7739: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
7740: PetscCall(PetscLayoutDestroy(&mat->rmap));
7741: mat->rmap = nmap;
7742: mat->rmap->mapping = l2g;
7743: }
7744: if (mat->cmap->refcnt) {
7745: ISLocalToGlobalMapping l2g = NULL;
7746: PetscLayout nmap = NULL;
7748: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
7749: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
7750: PetscCall(PetscLayoutDestroy(&mat->cmap));
7751: mat->cmap = nmap;
7752: mat->cmap->mapping = l2g;
7753: }
7754: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
7755: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
7756: PetscFunctionReturn(PETSC_SUCCESS);
7757: }
7759: /*@
7760: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7762: Logically Collective
7764: Input Parameters:
7765: + mat - the matrix
7766: . fromRow - matrix from which to copy row block size
7767: - fromCol - matrix from which to copy column block size (can be same as fromRow)
7769: Level: developer
7771: .seealso: [](chapter_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
7772: @*/
7773: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
7774: {
7775: PetscFunctionBegin;
7779: if (fromRow->rmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
7780: if (fromCol->cmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
7781: PetscFunctionReturn(PETSC_SUCCESS);
7782: }
7784: /*@
7785: MatResidual - Default routine to calculate the residual r = b - Ax
7787: Collective
7789: Input Parameters:
7790: + mat - the matrix
7791: . b - the right-hand-side
7792: - x - the approximate solution
7794: Output Parameter:
7795: . r - location to store the residual
7797: Level: developer
7799: .seealso: [](chapter_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
7800: @*/
7801: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
7802: {
7803: PetscFunctionBegin;
7809: MatCheckPreallocated(mat, 1);
7810: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
7811: if (!mat->ops->residual) {
7812: PetscCall(MatMult(mat, x, r));
7813: PetscCall(VecAYPX(r, -1.0, b));
7814: } else {
7815: PetscUseTypeMethod(mat, residual, b, x, r);
7816: }
7817: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
7818: PetscFunctionReturn(PETSC_SUCCESS);
7819: }
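/*
   Illustrative usage sketch: computing the residual r = b - A*x with work vectors obtained
   from the matrix; A is an assumption and the vectors are assumed to be filled elsewhere.

     Vec x, b, r;

     PetscCall(MatCreateVecs(A, &x, &b));
     PetscCall(VecDuplicate(b, &r));
     // ... fill x and b ...
     PetscCall(MatResidual(A, b, x, r));
     PetscCall(VecDestroy(&x));
     PetscCall(VecDestroy(&b));
     PetscCall(VecDestroy(&r));
*/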
7821: /*MC
7822: MatGetRowIJF90 - Obtains the compressed row storage i and j indices for the local rows of a sparse matrix
7824: Synopsis:
7825: MatGetRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
7827: Not Collective
7829: Input Parameters:
7830: + A - the matrix
7831: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7832: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7833: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7834: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7835: always used.
7837: Output Parameters:
7838: + n - number of local rows in the (possibly compressed) matrix
7839: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7840: . ja - the column indices
7841: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7842: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7844: Level: developer
7846: Note:
7847: Use `MatRestoreRowIJF90()` when you no longer need access to the data
7849: .seealso: [](chapter_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatRestoreRowIJF90()`
7850: M*/
7852: /*MC
7853: MatRestoreRowIJF90 - restores the compressed row storage i and j indices for the local rows of a sparse matrix obtained with `MatGetRowIJF90()`
7855: Synopsis:
7856: MatRestoreRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
7858: Not Collective
7860: Input Parameters:
7861: + A - the matrix
7862: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7863: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7864: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7865: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7866: always used.
7867: . n - number of local rows in the (possibly compressed) matrix
7868: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7869: . ja - the column indices
7870: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7871: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7873: Level: developer
7875: .seealso: [](chapter_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatGetRowIJF90()`
7876: M*/
7878: /*@C
7879: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
7881: Collective
7883: Input Parameters:
7884: + mat - the matrix
7885: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7886: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7887: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7888: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7889: always used.
7891: Output Parameters:
7892: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
7893: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
7894: . ja - the column indices, use `NULL` if not needed
7895: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7896: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7898: Level: developer
7900: Notes:
7901: You CANNOT change any of the ia[] or ja[] values.
7903: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
7905: Fortran Notes:
7906: Use
7907: .vb
7908: PetscInt, pointer :: ia(:),ja(:)
7909: call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
7910: ! Access the ith and jth entries via ia(i) and ja(j)
7911: .ve
7912: `MatGetRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatGetRowIJF90()`
7914: .seealso: [](chapter_matrices), `Mat`, `MATAIJ`, `MatGetRowIJF90()`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
7915: @*/
7916: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
7917: {
7918: PetscFunctionBegin;
7925: MatCheckPreallocated(mat, 1);
7926: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
7927: else {
7928: if (done) *done = PETSC_TRUE;
7929: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
7930: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
7931: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
7932: }
7933: PetscFunctionReturn(PETSC_SUCCESS);
7934: }
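/*
   Illustrative usage sketch: a read-only traversal of the compressed row storage of a
   sequential matrix A (an assumption); not every matrix type implements this, hence the
   check on done.

     PetscInt        nrows;
     const PetscInt *ia, *ja;
     PetscBool       done;

     PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done));
     if (done) {
       for (PetscInt i = 0; i < nrows; i++) {
         for (PetscInt k = ia[i]; k < ia[i + 1]; k++) {
           PetscInt col = ja[k];  // column index of a nonzero in row i
           (void)col;
         }
       }
     }
     PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done));
*/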
7936: /*@C
7937: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
7939: Collective
7941: Input Parameters:
7942: + mat - the matrix
7943: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7944: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
7945: symmetrized
7946: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7947: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7948: always used.
7949: . n - number of columns in the (possibly compressed) matrix
7950: . ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
7951: - ja - the row indices
7953: Output Parameter:
7954: . done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
7956: Level: developer
7958: .seealso: [](chapter_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
7959: @*/
7960: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
7961: {
7962: PetscFunctionBegin;
7969: MatCheckPreallocated(mat, 1);
7970: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
7971: else {
7972: *done = PETSC_TRUE;
7973: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
7974: }
7975: PetscFunctionReturn(PETSC_SUCCESS);
7976: }
7978: /*@C
7979: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
7981: Collective
7983: Input Parameters:
7984: + mat - the matrix
7985: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7986: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7987: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7988: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7989: always used.
7990: . n - size of (possibly compressed) matrix
7991: . ia - the row pointers
7992: - ja - the column indices
7994: Output Parameter:
7995: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
7997: Level: developer
7999: Note:
8000: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8001: use of the array after it has been restored. If you pass `NULL`, it will
8002: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8004: Fortran Note:
8005: `MatRestoreRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatRestoreRowIJF90()`
8007: .seealso: [](chapter_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreRowIJF90()`, `MatRestoreColumnIJ()`
8008: @*/
8009: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8010: {
8011: PetscFunctionBegin;
8017: MatCheckPreallocated(mat, 1);
8019: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8020: else {
8021: if (done) *done = PETSC_TRUE;
8022: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8023: if (n) *n = 0;
8024: if (ia) *ia = NULL;
8025: if (ja) *ja = NULL;
8026: }
8027: PetscFunctionReturn(PETSC_SUCCESS);
8028: }
8030: /*@C
8031: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8033: Collective
8035: Input Parameters:
8036: + mat - the matrix
8037: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8038: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8039: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8040: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8041: always used.
8043: Output Parameters:
8044: + n - size of (possibly compressed) matrix
8045: . ia - the column pointers
8046: . ja - the row indices
8047: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8049: Level: developer
8051: .seealso: [](chapter_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8052: @*/
8053: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8054: {
8055: PetscFunctionBegin;
8061: MatCheckPreallocated(mat, 1);
8063: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8064: else {
8065: *done = PETSC_TRUE;
8066: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8067: if (n) *n = 0;
8068: if (ia) *ia = NULL;
8069: if (ja) *ja = NULL;
8070: }
8071: PetscFunctionReturn(PETSC_SUCCESS);
8072: }
8074: /*@C
8075: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or `MatGetColumnIJ()`.
8077: Collective
8079: Input Parameters:
8080: + mat - the matrix
8081: . ncolors - maximum color value
8082: . n - number of entries in colorarray
8083: - colorarray - array indicating color for each column
8085: Output Parameter:
8086: . iscoloring - coloring generated using colorarray information
8088: Level: developer
8090: .seealso: [](chapter_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8091: @*/
8092: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8093: {
8094: PetscFunctionBegin;
8099: MatCheckPreallocated(mat, 1);
8101: if (!mat->ops->coloringpatch) {
8102: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8103: } else {
8104: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8105: }
8106: PetscFunctionReturn(PETSC_SUCCESS);
8107: }
8109: /*@
8110: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8112: Logically Collective
8114: Input Parameter:
8115: . mat - the factored matrix to be reset
8117: Level: developer
8119: Notes:
8120: This routine should be used only with factored matrices formed by in-place
8121: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8122: format). This option can save memory, for example, when solving nonlinear
8123: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8124: ILU(0) preconditioner.
8126: One can specify in-place ILU(0) factorization by calling
8127: .vb
8128: PCSetType(pc,PCILU);
8129: PCFactorSetUseInPlace(pc,PETSC_TRUE);
8130: .ve
8131: or by using the options -pc_type ilu -pc_factor_in_place
8133: In-place factorization ILU(0) can also be used as a local
8134: solver for the blocks within the block Jacobi or additive Schwarz
8135: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8136: for details on setting local solver options.
8138: Most users should employ the `KSP` interface for linear solvers
8139: instead of working directly with matrix algebra routines such as this.
8140: See, e.g., `KSPCreate()`.
8142: .seealso: [](chapter_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8143: @*/
8144: PetscErrorCode MatSetUnfactored(Mat mat)
8145: {
8146: PetscFunctionBegin;
8149: MatCheckPreallocated(mat, 1);
8150: mat->factortype = MAT_FACTOR_NONE;
8151: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8152: PetscUseTypeMethod(mat, setunfactored);
8153: PetscFunctionReturn(PETSC_SUCCESS);
8154: }
8156: /*MC
8157: MatDenseGetArrayF90 - Accesses a matrix array from Fortran
8159: Synopsis:
8160: MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8162: Not Collective
8164: Input Parameter:
8165: . x - matrix
8167: Output Parameters:
8168: + xx_v - the Fortran pointer to the array
8169: - ierr - error code
8171: Example of Usage:
8172: .vb
8173: PetscScalar, pointer :: xx_v(:,:)
8174: ....
8175: call MatDenseGetArrayF90(x,xx_v,ierr)
8176: a = xx_v(3,1)
8177: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8178: .ve
8180: Level: advanced
8182: .seealso: [](chapter_matrices), `Mat`, `MatDenseRestoreArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJGetArrayF90()`
8183: M*/
8185: /*MC
8186: MatDenseRestoreArrayF90 - Restores a matrix array that has been
8187: accessed with `MatDenseGetArrayF90()`.
8189: Synopsis:
8190: MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8192: Not Collective
8194: Input Parameters:
8195: + x - matrix
8196: - xx_v - the Fortran90 pointer to the array
8198: Output Parameter:
8199: . ierr - error code
8201: Example of Usage:
8202: .vb
8203: PetscScalar, pointer :: xx_v(:,:)
8204: ....
8205: call MatDenseGetArrayF90(x,xx_v,ierr)
8206: a = xx_v(3,1)
8207: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8208: .ve
8210: Level: advanced
8212: .seealso: [](chapter_matrices), `Mat`, `MatDenseGetArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJRestoreArrayF90()`
8213: M*/
8215: /*MC
8216: MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran.
8218: Synopsis:
8219: MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8221: Not Collective
8223: Input Parameter:
8224: . x - matrix
8226: Output Parameters:
8227: + xx_v - the Fortran pointer to the array
8228: - ierr - error code
8230: Example of Usage:
8231: .vb
8232: PetscScalar, pointer :: xx_v(:)
8233: ....
8234: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8235: a = xx_v(3)
8236: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8237: .ve
8239: Level: advanced
8241: .seealso: [](chapter_matrices), `Mat`, `MatSeqAIJRestoreArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseGetArrayF90()`
8242: M*/
8244: /*MC
8245: MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
8246: accessed with `MatSeqAIJGetArrayF90()`.
8248: Synopsis:
8249: MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8251: Not Collective
8253: Input Parameters:
8254: + x - matrix
8255: - xx_v - the Fortran90 pointer to the array
8257: Output Parameter:
8258: . ierr - error code
8260: Example of Usage:
8261: .vb
8262: PetscScalar, pointer :: xx_v(:)
8263: ....
8264: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8265: a = xx_v(3)
8266: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8267: .ve
8269: Level: advanced
8271: .seealso: [](chapter_matrices), `Mat`, `MatSeqAIJGetArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseRestoreArrayF90()`
8272: M*/
8274: /*@
8275: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8276: as the original matrix.
8278: Collective
8280: Input Parameters:
8281: + mat - the original matrix
8282: . isrow - parallel `IS` containing the rows this processor should obtain
8283: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" in the new matrix.
8284: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8286: Output Parameter:
8287: . newmat - the new submatrix, of the same type as the original matrix
8289: Level: advanced
8291: Notes:
8292: The submatrix can be multiplied with vectors that use the same layout as `iscol`.
8294: Some matrix types place restrictions on the row and column indices, such
8295: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8296: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8298: The index sets may not have duplicate entries.
8300: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`;
8301: the `MatCreateSubMatrix()` routine will create `newmat` for you. Any additional calls
8302: to this routine with a mat of the same nonzero structure and with a cll of `MAT_REUSE_MATRIX`
8303: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8304: you are finished using it.
8306: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8307: the input matrix.
8309: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8311: Example usage:
8312: Consider the following 8x8 matrix with 34 non-zero values, that is
8313: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8314: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8315: as follows
8316: .vb
8317: 1 2 0 | 0 3 0 | 0 4
8318: Proc0 0 5 6 | 7 0 0 | 8 0
8319: 9 0 10 | 11 0 0 | 12 0
8320: -------------------------------------
8321: 13 0 14 | 15 16 17 | 0 0
8322: Proc1 0 18 0 | 19 20 21 | 0 0
8323: 0 0 0 | 22 23 0 | 24 0
8324: -------------------------------------
8325: Proc2 25 26 27 | 0 0 28 | 29 0
8326: 30 0 0 | 31 32 33 | 0 34
8327: .ve
8329: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8331: .vb
8332: 2 0 | 0 3 0 | 0
8333: Proc0 5 6 | 7 0 0 | 8
8334: -------------------------------
8335: Proc1 18 0 | 19 20 21 | 0
8336: -------------------------------
8337: Proc2 26 27 | 0 0 28 | 29
8338: 0 0 | 31 32 33 | 0
8339: .ve
8341: .seealso: [](chapter_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8342: @*/
8343: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8344: {
8345: PetscMPIInt size;
8346: Mat *local;
8347: IS iscoltmp;
8348: PetscBool flg;
8350: PetscFunctionBegin;
8357: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8358: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8360: MatCheckPreallocated(mat, 1);
8361: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8363: if (!iscol || isrow == iscol) {
8364: PetscBool stride;
8365: PetscMPIInt grabentirematrix = 0, grab;
8366: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8367: if (stride) {
8368: PetscInt first, step, n, rstart, rend;
8369: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8370: if (step == 1) {
8371: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8372: if (rstart == first) {
8373: PetscCall(ISGetLocalSize(isrow, &n));
8374: if (n == rend - rstart) grabentirematrix = 1;
8375: }
8376: }
8377: }
8378: PetscCall(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8379: if (grab) {
8380: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8381: if (cll == MAT_INITIAL_MATRIX) {
8382: *newmat = mat;
8383: PetscCall(PetscObjectReference((PetscObject)mat));
8384: }
8385: PetscFunctionReturn(PETSC_SUCCESS);
8386: }
8387: }
8389: if (!iscol) {
8390: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8391: } else {
8392: iscoltmp = iscol;
8393: }
8395: /* if original matrix is on just one processor then use submatrix generated */
8396: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8397: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8398: goto setproperties;
8399: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8400: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8401: *newmat = *local;
8402: PetscCall(PetscFree(local));
8403: goto setproperties;
8404: } else if (!mat->ops->createsubmatrix) {
8405: /* Create a new matrix type that implements the operation using the full matrix */
8406: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8407: switch (cll) {
8408: case MAT_INITIAL_MATRIX:
8409: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8410: break;
8411: case MAT_REUSE_MATRIX:
8412: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8413: break;
8414: default:
8415: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8416: }
8417: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8418: goto setproperties;
8419: }
8421: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8422: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8423: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8425: setproperties:
8426: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8427: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8428: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8429: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8430: PetscFunctionReturn(PETSC_SUCCESS);
8431: }
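/*
   Illustrative usage sketch: extracting the parallel submatrix consisting of every other
   locally owned row and all columns; A and the choice of rows are assumptions for
   illustration only.

     IS       isrow;
     Mat      S;
     PetscInt rstart, rend;

     PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
     PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), (rend - rstart + 1) / 2, rstart, 2, &isrow));
     PetscCall(MatCreateSubMatrix(A, isrow, NULL, MAT_INITIAL_MATRIX, &S));  // NULL iscol keeps all columns
     // ... use S ...
     PetscCall(MatDestroy(&S));
     PetscCall(ISDestroy(&isrow));
*/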
8433: /*@
8434: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8436: Not Collective
8438: Input Parameters:
8439: + A - the matrix we wish to propagate options from
8440: - B - the matrix we wish to propagate options to
8442: Level: beginner
8444: Note:
8445: Propagates the options associated to `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8447: .seealso: [](chapter_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8448: @*/
8449: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8450: {
8451: PetscFunctionBegin;
8454: B->symmetry_eternal = A->symmetry_eternal;
8455: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8456: B->symmetric = A->symmetric;
8457: B->structurally_symmetric = A->structurally_symmetric;
8458: B->spd = A->spd;
8459: B->hermitian = A->hermitian;
8460: PetscFunctionReturn(PETSC_SUCCESS);
8461: }
8463: /*@
8464: MatStashSetInitialSize - Sets the sizes of the matrix stash, which is
8465: used during the assembly process to store values that belong to
8466: other processors.
8468: Not Collective
8470: Input Parameters:
8471: + mat - the matrix
8472: . size - the initial size of the stash.
8473: - bsize - the initial size of the block-stash(if used).
8475: Options Database Keys:
8476: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set the initial stash size
8477: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set the initial block-stash size
8479: Level: intermediate
8481: Notes:
8482: The block-stash is used for values set with `MatSetValuesBlocked()` while
8483: the stash is used for values set with `MatSetValues()`
8485: Run with the option `-info` and look for output of the form
8486: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8487: to determine the appropriate value, MM, to use for `size`, and
8488: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8489: to determine the appropriate value, BMM, to use for `bsize`.
8491: .seealso: [](chapter_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8492: @*/
8493: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8494: {
8495: PetscFunctionBegin;
8498: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8499: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8500: PetscFunctionReturn(PETSC_SUCCESS);
8501: }
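/*
  Illustrative usage sketch (not part of the PETSc sources): when a process sets
  many entries into rows owned by other MPI processes, pre-sizing the stash
  avoids the repeated mallocs reported by -info during assembly. The expected
  counts and the helper name are assumptions made for this example only.
*/
static PetscErrorCode ExamplePresizeStash(Mat A, PetscInt expected_offproc, PetscInt expected_offproc_blocks)
{
  PetscFunctionBegin;
  /* must be called before the MatSetValues()/MatAssemblyBegin() sequence */
  PetscCall(MatStashSetInitialSize(A, expected_offproc, expected_offproc_blocks));
  /* ... MatSetValues()/MatSetValuesBlocked() calls, including off-process rows ... */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}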
8503: /*@
8504: MatInterpolateAdd - Computes w = y + A*x or w = y + A'*x, depending on the shape of
8505: the matrix
8507: Neighbor-wise Collective
8509: Input Parameters:
8510: + A - the matrix
8511: . x - the vector to be multiplied by the interpolation operator
8512: - y - the vector to be added to the result
8514: Output Parameter:
8515: . w - the resulting vector
8517: Level: intermediate
8519: Notes:
8520: `w` may be the same vector as `y`.
8522: This allows one to use either the restriction or interpolation (its transpose)
8523: matrix to do the interpolation.
8525: .seealso: [](chapter_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8526: @*/
8527: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8528: {
8529: PetscInt M, N, Ny;
8531: PetscFunctionBegin;
8536: PetscCall(MatGetSize(A, &M, &N));
8537: PetscCall(VecGetSize(y, &Ny));
8538: if (M == Ny) {
8539: PetscCall(MatMultAdd(A, x, y, w));
8540: } else {
8541: PetscCall(MatMultTransposeAdd(A, x, y, w));
8542: }
8543: PetscFunctionReturn(PETSC_SUCCESS);
8544: }
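/*
  Illustrative usage sketch (not part of the PETSc sources): a two-level
  coarse-grid correction, x_fine <- x_fine + P * e_coarse. Because
  MatInterpolateAdd() compares the row count of the operator with the size of y,
  the same call works whether P is stored as interpolation (nfine x ncoarse) or
  as restriction (ncoarse x nfine). Names are assumptions for this example only.
*/
static PetscErrorCode ExampleCoarseGridCorrection(Mat P, Vec e_coarse, Vec x_fine)
{
  PetscFunctionBegin;
  /* w may alias y, so the correction is accumulated in place */
  PetscCall(MatInterpolateAdd(P, e_coarse, x_fine, x_fine));
  PetscFunctionReturn(PETSC_SUCCESS);
}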
8546: /*@
8547: MatInterpolate - Computes y = A*x or y = A'*x, depending on the shape of
8548: the matrix
8550: Neighbor-wise Collective
8552: Input Parameters:
8553: + A - the matrix
8554: - x - the vector to be interpolated
8556: Output Parameter:
8557: . y - the resulting vector
8559: Level: intermediate
8561: Note:
8562: This allows one to use either the restriction or interpolation (its transpose)
8563: matrix to do the interpolation.
8565: .seealso: [](chapter_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8566: @*/
8567: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8568: {
8569: PetscInt M, N, Ny;
8571: PetscFunctionBegin;
8575: PetscCall(MatGetSize(A, &M, &N));
8576: PetscCall(VecGetSize(y, &Ny));
8577: if (M == Ny) {
8578: PetscCall(MatMult(A, x, y));
8579: } else {
8580: PetscCall(MatMultTranspose(A, x, y));
8581: }
8582: PetscFunctionReturn(PETSC_SUCCESS);
8583: }
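/*
  Illustrative usage sketch (not part of the PETSc sources): interpolate a
  coarse-grid vector to the fine grid with an operator P stored as
  nfine x ncoarse. The output vector then matches the row count of P, so
  MatInterpolate() takes the MatMult() branch; had P been stored transposed, the
  MatMultTranspose() branch would be taken instead. The helper name is an
  assumption made for this example only.
*/
static PetscErrorCode ExampleProlongate(Mat P, Vec x_coarse, Vec *x_fine)
{
  PetscFunctionBegin;
  /* the left vector of P (its range) has the fine-grid layout */
  PetscCall(MatCreateVecs(P, NULL, x_fine));
  PetscCall(MatInterpolate(P, x_coarse, *x_fine));
  PetscFunctionReturn(PETSC_SUCCESS);
}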
8585: /*@
8586: MatRestrict - Computes y = A*x or y = A'*x, depending on the shape of the matrix
8588: Neighbor-wise Collective
8590: Input Parameters:
8591: + A - the matrix
8592: - x - the vector to be restricted
8594: Output Parameter:
8595: . y - the resulting vector
8597: Level: intermediate
8599: Note:
8600: This allows one to use either the restriction or interpolation (its transpose)
8601: matrix to do the restriction.
8603: .seealso: [](chapter_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8604: @*/
8605: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8606: {
8607: PetscInt M, N, Ny;
8609: PetscFunctionBegin;
8613: PetscCall(MatGetSize(A, &M, &N));
8614: PetscCall(VecGetSize(y, &Ny));
8615: if (M == Ny) {
8616: PetscCall(MatMult(A, x, y));
8617: } else {
8618: PetscCall(MatMultTranspose(A, x, y));
8619: }
8620: PetscFunctionReturn(PETSC_SUCCESS);
8621: }
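/*
  Illustrative usage sketch (not part of the PETSc sources): restrict a fine-grid
  residual to the coarse grid using the same operator P (stored as
  nfine x ncoarse) as in the interpolation sketch above. The coarse output vector
  does not match the row count of P, so MatRestrict() takes the
  MatMultTranspose() branch and computes r_coarse = P' * r_fine. The helper name
  is an assumption made for this example only.
*/
static PetscErrorCode ExampleRestrictResidual(Mat P, Vec r_fine, Vec *r_coarse)
{
  PetscFunctionBegin;
  /* the right vector of P (its domain) has the coarse-grid layout */
  PetscCall(MatCreateVecs(P, r_coarse, NULL));
  PetscCall(MatRestrict(P, r_fine, *r_coarse));
  PetscFunctionReturn(PETSC_SUCCESS);
}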
8623: /*@
8624: MatMatInterpolateAdd - Computes Y = W + A*X or Y = W + A'*X, depending on the shape of the matrix
8626: Neighbor-wise Collective
8628: Input Parameters:
8629: + A - the matrix
8630: . x - the input dense matrix to be multiplied
8631: - w - the input dense matrix to be added to the result
8633: Output Parameter:
8634: . y - the output dense matrix
8636: Level: intermediate
8638: Note:
8639: This allows one to use either the restriction or interpolation (its transpose)
8640: matrix to do the interpolation. The matrix `y` can be reused if it has already been created with the proper sizes,
8641: otherwise it will be recreated; `y` must be initialized to `NULL` if a reusable matrix is not supplied.
8643: .seealso: [](chapter_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8644: @*/
8645: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8646: {
8647: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8648: PetscBool trans = PETSC_TRUE;
8649: MatReuse reuse = MAT_INITIAL_MATRIX;
8651: PetscFunctionBegin;
8657: PetscCall(MatGetSize(A, &M, &N));
8658: PetscCall(MatGetSize(x, &Mx, &Nx));
8659: if (N == Mx) trans = PETSC_FALSE;
8660: else