Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_SetValuesBatch;
40: PetscLogEvent MAT_ViennaCLCopyToGPU;
41: PetscLogEvent MAT_CUDACopyToGPU;
42: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
43: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
44: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
45: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
46: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
48: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
50: /*@
51: MatSetRandom - Sets all components of a matrix to random numbers.
53: Logically Collective
55: Input Parameters:
56: + x - the matrix
57: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL`, in which case
58: one will be created internally.
60: Example:
61: .vb
62: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
63: MatSetRandom(x,rctx);
64: PetscRandomDestroy(&rctx);
65: .ve
67: Level: intermediate
69: Notes:
70: For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations;
72: for sparse matrices that already have nonzero locations, it fills those locations with random numbers.
74: It generates an error if used on unassembled sparse matrices that have not been preallocated.
76: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
77: @*/
78: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
79: {
80: PetscRandom randObj = NULL;
82: PetscFunctionBegin;
86: MatCheckPreallocated(x, 1);
88: if (!rctx) {
89: MPI_Comm comm;
90: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
91: PetscCall(PetscRandomCreate(comm, &randObj));
92: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
93: PetscCall(PetscRandomSetFromOptions(randObj));
94: rctx = randObj;
95: }
96: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
97: PetscUseTypeMethod(x, setrandom, rctx);
98: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
100: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
101: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(PetscRandomDestroy(&randObj));
103: PetscFunctionReturn(PETSC_SUCCESS);
104: }
106: /*@
107: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
109: Logically Collective
111: Input Parameter:
112: . mat - the factored matrix
114: Output Parameters:
115: + pivot - the pivot value computed
116: - row - the row in which the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
117: share the matrix
119: Level: advanced
121: Notes:
122: This routine does not work for factorizations done with external packages.
124: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
126: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
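Example Usage:
A minimal sketch (assumes `F` is a factored matrix obtained earlier, for example from `MatGetFactor()` followed by a numeric factorization):
.vb
MatFactorError err;
PetscReal      pivot;
PetscInt       row;
MatFactorGetError(F, &err);
if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
  MatFactorGetErrorZeroPivot(F, &pivot, &row);
  MatFactorClearError(F);
}
.ve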
128: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
129: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
130: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
131: @*/
132: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
133: {
134: PetscFunctionBegin;
136: PetscAssertPointer(pivot, 2);
137: PetscAssertPointer(row, 3);
138: *pivot = mat->factorerror_zeropivot_value;
139: *row = mat->factorerror_zeropivot_row;
140: PetscFunctionReturn(PETSC_SUCCESS);
141: }
143: /*@
144: MatFactorGetError - gets the error code from a factorization
146: Logically Collective
148: Input Parameter:
149: . mat - the factored matrix
151: Output Parameter:
152: . err - the error code
154: Level: advanced
156: Note:
157: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
159: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
160: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
161: @*/
162: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
163: {
164: PetscFunctionBegin;
166: PetscAssertPointer(err, 2);
167: *err = mat->factorerrortype;
168: PetscFunctionReturn(PETSC_SUCCESS);
169: }
171: /*@
172: MatFactorClearError - clears the error code in a factorization
174: Logically Collective
176: Input Parameter:
177: . mat - the factored matrix
179: Level: developer
181: Note:
182: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
184: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
185: `MatGetErrorCode()`, `MatFactorError`
186: @*/
187: PetscErrorCode MatFactorClearError(Mat mat)
188: {
189: PetscFunctionBegin;
191: mat->factorerrortype = MAT_FACTOR_NOERROR;
192: mat->factorerror_zeropivot_value = 0.0;
193: mat->factorerror_zeropivot_row = 0;
194: PetscFunctionReturn(PETSC_SUCCESS);
195: }
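/*
  Default (fallback) implementation behind MatFindNonzeroRows() and MatFindZeroRows() when a matrix type
  provides no specialized routine: multiply the matrix (or its transpose) by a random vector and keep the
  local indices whose result exceeds tol in magnitude; *nonzero is set to NULL when no zero rows/columns
  are detected globally.
*/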
197: PETSC_INTERN PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
198: {
199: Vec r, l;
200: const PetscScalar *al;
201: PetscInt i, nz, gnz, N, n;
203: PetscFunctionBegin;
204: PetscCall(MatCreateVecs(mat, &r, &l));
205: if (!cols) { /* nonzero rows */
206: PetscCall(MatGetSize(mat, &N, NULL));
207: PetscCall(MatGetLocalSize(mat, &n, NULL));
208: PetscCall(VecSet(l, 0.0));
209: PetscCall(VecSetRandom(r, NULL));
210: PetscCall(MatMult(mat, r, l));
211: PetscCall(VecGetArrayRead(l, &al));
212: } else { /* nonzero columns */
213: PetscCall(MatGetSize(mat, NULL, &N));
214: PetscCall(MatGetLocalSize(mat, NULL, &n));
215: PetscCall(VecSet(r, 0.0));
216: PetscCall(VecSetRandom(l, NULL));
217: PetscCall(MatMultTranspose(mat, l, r));
218: PetscCall(VecGetArrayRead(r, &al));
219: }
220: if (tol <= 0.0) {
221: for (i = 0, nz = 0; i < n; i++)
222: if (al[i] != 0.0) nz++;
223: } else {
224: for (i = 0, nz = 0; i < n; i++)
225: if (PetscAbsScalar(al[i]) > tol) nz++;
226: }
227: PetscCall(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
228: if (gnz != N) {
229: PetscInt *nzr;
230: PetscCall(PetscMalloc1(nz, &nzr));
231: if (nz) {
232: if (tol < 0) {
233: for (i = 0, nz = 0; i < n; i++)
234: if (al[i] != 0.0) nzr[nz++] = i;
235: } else {
236: for (i = 0, nz = 0; i < n; i++)
237: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i;
238: }
239: }
240: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
241: } else *nonzero = NULL;
242: if (!cols) { /* nonzero rows */
243: PetscCall(VecRestoreArrayRead(l, &al));
244: } else {
245: PetscCall(VecRestoreArrayRead(r, &al));
246: }
247: PetscCall(VecDestroy(&l));
248: PetscCall(VecDestroy(&r));
249: PetscFunctionReturn(PETSC_SUCCESS);
250: }
252: /*@
253: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
255: Input Parameter:
256: . mat - the matrix
258: Output Parameter:
259: . keptrows - the rows that are not completely zero
261: Level: intermediate
263: Note:
264: `keptrows` is set to `NULL` if all rows are nonzero.
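Example Usage:
A minimal sketch (assumes an assembled matrix `A`):
.vb
IS keptrows;
MatFindNonzeroRows(A, &keptrows);
if (keptrows) { /* NULL means every row has a nonzero */
  ISView(keptrows, PETSC_VIEWER_STDOUT_WORLD);
  ISDestroy(&keptrows);
}
.ve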
266: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
267: @*/
268: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
269: {
270: PetscFunctionBegin;
273: PetscAssertPointer(keptrows, 2);
274: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
275: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
276: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
277: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
278: PetscFunctionReturn(PETSC_SUCCESS);
279: }
281: /*@
282: MatFindZeroRows - Locate all rows that are completely zero in the matrix
284: Input Parameter:
285: . mat - the matrix
287: Output Parameter:
288: . zerorows - the rows that are completely zero
290: Level: intermediate
292: Note:
293: `zerorows` is set to `NULL` if no rows are zero.
295: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
296: @*/
297: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
298: {
299: IS keptrows;
300: PetscInt m, n;
302: PetscFunctionBegin;
305: PetscAssertPointer(zerorows, 2);
306: PetscCall(MatFindNonzeroRows(mat, &keptrows));
307: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
308: In keeping with this convention, we set zerorows to NULL if there are no zero
309: rows. */
310: if (keptrows == NULL) {
311: *zerorows = NULL;
312: } else {
313: PetscCall(MatGetOwnershipRange(mat, &m, &n));
314: PetscCall(ISComplement(keptrows, m, n, zerorows));
315: PetscCall(ISDestroy(&keptrows));
316: }
317: PetscFunctionReturn(PETSC_SUCCESS);
318: }
320: /*@
321: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
323: Not Collective
325: Input Parameter:
326: . A - the matrix
328: Output Parameter:
329: . a - the diagonal part (which is a SEQUENTIAL matrix)
331: Level: advanced
333: Notes:
334: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
336: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
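Example Usage:
A minimal sketch (assumes a parallel matrix `A`, for example of type `MATMPIAIJ`):
.vb
Mat Ad;
MatGetDiagonalBlock(A, &Ad);
/* use Ad here; do not destroy it, the reference is borrowed from A */
.ve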
338: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
339: @*/
340: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
341: {
342: PetscFunctionBegin;
345: PetscAssertPointer(a, 2);
346: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
347: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
348: else {
349: PetscMPIInt size;
351: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
352: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
353: *a = A;
354: }
355: PetscFunctionReturn(PETSC_SUCCESS);
356: }
358: /*@
359: MatGetTrace - Gets the trace of a matrix, the sum of the diagonal entries.
361: Collective
363: Input Parameter:
364: . mat - the matrix
366: Output Parameter:
367: . trace - the sum of the diagonal entries
369: Level: advanced
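Example Usage:
A minimal sketch (assumes an assembled matrix `A`):
.vb
PetscScalar tr;
MatGetTrace(A, &tr);
PetscPrintf(PETSC_COMM_WORLD, "trace = %g\n", (double)PetscRealPart(tr));
.ve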
371: .seealso: [](ch_matrices), `Mat`
372: @*/
373: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
374: {
375: Vec diag;
377: PetscFunctionBegin;
379: PetscAssertPointer(trace, 2);
380: PetscCall(MatCreateVecs(mat, &diag, NULL));
381: PetscCall(MatGetDiagonal(mat, diag));
382: PetscCall(VecSum(diag, trace));
383: PetscCall(VecDestroy(&diag));
384: PetscFunctionReturn(PETSC_SUCCESS);
385: }
387: /*@
388: MatRealPart - Zeros out the imaginary part of the matrix
390: Logically Collective
392: Input Parameter:
393: . mat - the matrix
395: Level: advanced
397: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
398: @*/
399: PetscErrorCode MatRealPart(Mat mat)
400: {
401: PetscFunctionBegin;
404: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
405: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
406: MatCheckPreallocated(mat, 1);
407: PetscUseTypeMethod(mat, realpart);
408: PetscFunctionReturn(PETSC_SUCCESS);
409: }
411: /*@C
412: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
414: Collective
416: Input Parameter:
417: . mat - the matrix
419: Output Parameters:
420: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
421: - ghosts - the global indices of the ghost points
423: Level: advanced
425: Note:
426: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
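Example Usage:
A minimal sketch creating a ghosted vector compatible with the matrix's column layout (whether that layout is the one you want is an assumption of this example):
.vb
PetscInt        nghosts, nlocal;
const PetscInt *ghosts;
Vec             v;
MatGetLocalSize(A, NULL, &nlocal);
MatGetGhosts(A, &nghosts, &ghosts);
VecCreateGhost(PetscObjectComm((PetscObject)A), nlocal, PETSC_DECIDE, nghosts, ghosts, &v);
.ve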
428: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
429: @*/
430: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
431: {
432: PetscFunctionBegin;
435: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
436: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
437: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
438: else {
439: if (nghosts) *nghosts = 0;
440: if (ghosts) *ghosts = NULL;
441: }
442: PetscFunctionReturn(PETSC_SUCCESS);
443: }
445: /*@
446: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
448: Logically Collective
450: Input Parameter:
451: . mat - the matrix
453: Level: advanced
455: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
456: @*/
457: PetscErrorCode MatImaginaryPart(Mat mat)
458: {
459: PetscFunctionBegin;
462: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
463: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
464: MatCheckPreallocated(mat, 1);
465: PetscUseTypeMethod(mat, imaginarypart);
466: PetscFunctionReturn(PETSC_SUCCESS);
467: }
469: /*@
470: MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure
472: Not Collective
474: Input Parameter:
475: . mat - the matrix
477: Output Parameters:
478: + missing - is any diagonal entry missing
479: - dd - first diagonal entry that is missing on this process (optional)
481: Level: advanced
483: Note:
484: This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value
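Example Usage:
A minimal sketch (assumes an assembled matrix `A`):
.vb
PetscBool missing;
PetscInt  d;
MatMissingDiagonal(A, &missing, &d);
if (missing) PetscPrintf(PETSC_COMM_SELF, "first missing diagonal on this process: row %" PetscInt_FMT "\n", d);
.ve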
486: .seealso: [](ch_matrices), `Mat`
487: @*/
488: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
489: {
490: PetscFunctionBegin;
493: PetscAssertPointer(missing, 2);
494: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
495: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
496: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
497: PetscFunctionReturn(PETSC_SUCCESS);
498: }
500: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
501: /*@C
502: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
503: for each row that you get to ensure that your application does
504: not bleed memory.
506: Not Collective
508: Input Parameters:
509: + mat - the matrix
510: - row - the row to get
512: Output Parameters:
513: + ncols - if not `NULL`, the number of nonzeros in `row`
514: . cols - if not `NULL`, the column numbers
515: - vals - if not `NULL`, the numerical values
517: Level: advanced
519: Notes:
520: This routine is provided for people who need to have direct access
521: to the structure of a matrix. We hope that we provide enough
522: high-level matrix routines that few users will need it.
524: `MatGetRow()` always returns 0-based column indices, regardless of
525: whether the internal representation is 0-based (default) or 1-based.
527: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
528: not wish to extract these quantities.
530: The user can only examine the values extracted with `MatGetRow()`;
531: the values CANNOT be altered. To change the matrix entries, one
532: must use `MatSetValues()`.
534: You can only have one call to `MatGetRow()` outstanding for a particular
535: matrix at a time, per processor. `MatGetRow()` can only obtain rows
536: associated with the given processor, it cannot get rows from the
537: other processors; for that we suggest using `MatCreateSubMatrices()`, then
538: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
539: is in the global numbering of rows.
541: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
543: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
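Example Usage:
A minimal sketch looping over the locally owned rows of an assembled matrix `A`:
.vb
PetscInt           rstart, rend, ncols;
const PetscInt    *cols;
const PetscScalar *vals;
MatGetOwnershipRange(A, &rstart, &rend);
for (PetscInt row = rstart; row < rend; row++) {
  MatGetRow(A, row, &ncols, &cols, &vals);
  /* examine cols[] and vals[] here; they may not be modified */
  MatRestoreRow(A, row, &ncols, &cols, &vals);
}
.ve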
545: Fortran Note:
546: The calling sequence is
547: .vb
548: MatGetRow(matrix,row,ncols,cols,values,ierr)
549: Mat matrix (input)
550: integer row (input)
551: integer ncols (output)
552: integer cols(maxcols) (output)
553: double precision (or double complex) values(maxcols) (output)
554: .ve
555: where maxcols >= maximum nonzeros in any row of the matrix.
557: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
558: @*/
559: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
560: {
561: PetscInt incols;
563: PetscFunctionBegin;
566: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
567: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
568: MatCheckPreallocated(mat, 1);
569: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
570: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
571: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
572: if (ncols) *ncols = incols;
573: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
574: PetscFunctionReturn(PETSC_SUCCESS);
575: }
577: /*@
578: MatConjugate - replaces the matrix values with their complex conjugates
580: Logically Collective
582: Input Parameter:
583: . mat - the matrix
585: Level: advanced
587: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
588: @*/
589: PetscErrorCode MatConjugate(Mat mat)
590: {
591: PetscFunctionBegin;
593: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
594: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
595: PetscUseTypeMethod(mat, conjugate);
596: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
597: }
598: PetscFunctionReturn(PETSC_SUCCESS);
599: }
601: /*@C
602: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
604: Not Collective
606: Input Parameters:
607: + mat - the matrix
608: . row - the row to get
609: . ncols - the number of nonzeros
610: . cols - the columns of the nonzeros
611: - vals - if not `NULL`, the numerical values
613: Level: advanced
615: Notes:
616: This routine should be called after you have finished examining the entries.
618: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
619: use of the array after it has been restored. If you pass `NULL`, it will
620: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
622: Fortran Notes:
623: The calling sequence is
624: .vb
625: MatRestoreRow(matrix,row,ncols,cols,values,ierr)
626: Mat matrix (input)
627: integer row (input)
628: integer ncols (output)
629: integer cols(maxcols) (output)
630: double precision (or double complex) values(maxcols) (output)
631: .ve
632: where maxcols >= maximum nonzeros in any row of the matrix.
634: In Fortran `MatRestoreRow()` MUST be called after `MatGetRow()`
635: before another call to `MatGetRow()` can be made.
637: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
638: @*/
639: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
640: {
641: PetscFunctionBegin;
643: if (ncols) PetscAssertPointer(ncols, 3);
644: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
645: if (!mat->ops->restorerow) PetscFunctionReturn(PETSC_SUCCESS);
646: PetscUseTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
647: if (ncols) *ncols = 0;
648: if (cols) *cols = NULL;
649: if (vals) *vals = NULL;
650: PetscFunctionReturn(PETSC_SUCCESS);
651: }
653: /*@
654: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
655: You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.
657: Not Collective
659: Input Parameter:
660: . mat - the matrix
662: Level: advanced
664: Note:
665: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for matrices in `MATSBAIJ` format.
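Example Usage:
A minimal sketch for a `MATSBAIJ` matrix (declarations of `row`, `ncols`, `cols`, and `vals` are assumed, as in the `MatGetRow()` example):
.vb
MatGetRowUpperTriangular(A);
MatGetRow(A, row, &ncols, &cols, &vals);
/* examine the upper triangular part of the row */
MatRestoreRow(A, row, &ncols, &cols, &vals);
MatRestoreRowUpperTriangular(A);
.ve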
667: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
668: @*/
669: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
670: {
671: PetscFunctionBegin;
674: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
675: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
676: MatCheckPreallocated(mat, 1);
677: if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
678: PetscUseTypeMethod(mat, getrowuppertriangular);
679: PetscFunctionReturn(PETSC_SUCCESS);
680: }
682: /*@
683: MatRestoreRowUpperTriangular - Disables calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
685: Not Collective
687: Input Parameter:
688: . mat - the matrix
690: Level: advanced
692: Note:
693: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
695: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
696: @*/
697: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
698: {
699: PetscFunctionBegin;
702: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
703: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
704: MatCheckPreallocated(mat, 1);
705: if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
706: PetscUseTypeMethod(mat, restorerowuppertriangular);
707: PetscFunctionReturn(PETSC_SUCCESS);
708: }
710: /*@C
711: MatSetOptionsPrefix - Sets the prefix used for searching for all
712: `Mat` options in the database.
714: Logically Collective
716: Input Parameters:
717: + A - the matrix
718: - prefix - the prefix to prepend to all option names
720: Level: advanced
722: Notes:
723: A hyphen (-) must NOT be given at the beginning of the prefix name.
724: The first character of all runtime options is AUTOMATICALLY the hyphen.
726: This is NOT used for options for the factorization of the matrix. Normally the
727: prefix is automatically passed in from the PC calling the factorization. To set
728: it directly use `MatSetOptionsPrefixFactor()`
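Example Usage:
A minimal sketch (the prefix "sub_" is an arbitrary, illustrative choice):
.vb
MatSetOptionsPrefix(A, "sub_");
MatSetFromOptions(A);   /* now controlled by -sub_mat_type, -sub_mat_view, ... */
.ve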
730: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
731: @*/
732: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
733: {
734: PetscFunctionBegin;
736: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
737: PetscFunctionReturn(PETSC_SUCCESS);
738: }
740: /*@C
741: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database
742: for matrices created with `MatGetFactor()`
744: Logically Collective
746: Input Parameters:
747: + A - the matrix
748: - prefix - the prefix to prepend to all option names for the factored matrix
750: Level: developer
752: Notes:
753: A hyphen (-) must NOT be given at the beginning of the prefix name.
754: The first character of all runtime options is AUTOMATICALLY the hyphen.
756: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
757: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
759: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
760: @*/
761: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
762: {
763: PetscFunctionBegin;
765: if (prefix) {
766: PetscAssertPointer(prefix, 2);
767: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
768: if (prefix != A->factorprefix) {
769: PetscCall(PetscFree(A->factorprefix));
770: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
771: }
772: } else PetscCall(PetscFree(A->factorprefix));
773: PetscFunctionReturn(PETSC_SUCCESS);
774: }
776: /*@C
777: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database
778: for matrices created with `MatGetFactor()`
780: Logically Collective
782: Input Parameters:
783: + A - the matrix
784: - prefix - the prefix to prepend to all option names for the factored matrix
786: Level: developer
788: Notes:
789: A hyphen (-) must NOT be given at the beginning of the prefix name.
790: The first character of all runtime options is AUTOMATICALLY the hyphen.
792: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
793: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
795: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
796: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
797: `MatSetOptionsPrefix()`
798: @*/
799: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
800: {
801: size_t len1, len2, new_len;
803: PetscFunctionBegin;
805: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
806: if (!A->factorprefix) {
807: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
808: PetscFunctionReturn(PETSC_SUCCESS);
809: }
810: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
812: PetscCall(PetscStrlen(A->factorprefix, &len1));
813: PetscCall(PetscStrlen(prefix, &len2));
814: new_len = len1 + len2 + 1;
815: PetscCall(PetscRealloc(new_len * sizeof(*(A->factorprefix)), &A->factorprefix));
816: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
817: PetscFunctionReturn(PETSC_SUCCESS);
818: }
820: /*@C
821: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
822: matrix options in the database.
824: Logically Collective
826: Input Parameters:
827: + A - the matrix
828: - prefix - the prefix to prepend to all option names
830: Level: advanced
832: Note:
833: A hyphen (-) must NOT be given at the beginning of the prefix name.
834: The first character of all runtime options is AUTOMATICALLY the hyphen.
836: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
837: @*/
838: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
839: {
840: PetscFunctionBegin;
842: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
843: PetscFunctionReturn(PETSC_SUCCESS);
844: }
846: /*@C
847: MatGetOptionsPrefix - Gets the prefix used for searching for all
848: matrix options in the database.
850: Not Collective
852: Input Parameter:
853: . A - the matrix
855: Output Parameter:
856: . prefix - pointer to the prefix string used
858: Level: advanced
860: Fortran Note:
861: The user should pass in a string `prefix` of
862: sufficient length to hold the prefix.
864: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
865: @*/
866: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
867: {
868: PetscFunctionBegin;
870: PetscAssertPointer(prefix, 2);
871: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
872: PetscFunctionReturn(PETSC_SUCCESS);
873: }
875: /*@
876: MatResetPreallocation - Reset matrix to use the original nonzero pattern provided by the user.
878: Collective
880: Input Parameter:
881: . A - the matrix
883: Level: beginner
885: Notes:
886: The allocated memory will be shrunk after calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
888: Users can reset the preallocation to access the original memory.
890: Currently only supported for `MATAIJ` matrices.
892: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
893: @*/
894: PetscErrorCode MatResetPreallocation(Mat A)
895: {
896: PetscFunctionBegin;
899: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset preallocation after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
900: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
901: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
902: PetscFunctionReturn(PETSC_SUCCESS);
903: }
905: /*@
906: MatSetUp - Sets up the internal matrix data structures for later use.
908: Collective
910: Input Parameter:
911: . A - the matrix
913: Level: intermediate
915: Notes:
916: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
917: setting values in the matrix.
919: This routine is called internally by other matrix functions when needed, so it rarely needs to be called by users.
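Example Usage:
A minimal sketch of the typical creation sequence (`N` is an assumed global size):
.vb
Mat A;
MatCreate(PETSC_COMM_WORLD, &A);
MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, N, N);
MatSetFromOptions(A);
MatSetUp(A);   /* no explicit preallocation; values may now be set */
.ve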
921: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
922: @*/
923: PetscErrorCode MatSetUp(Mat A)
924: {
925: PetscFunctionBegin;
927: if (!((PetscObject)A)->type_name) {
928: PetscMPIInt size;
930: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
931: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
932: }
933: if (!A->preallocated) PetscTryTypeMethod(A, setup);
934: PetscCall(PetscLayoutSetUp(A->rmap));
935: PetscCall(PetscLayoutSetUp(A->cmap));
936: A->preallocated = PETSC_TRUE;
937: PetscFunctionReturn(PETSC_SUCCESS);
938: }
940: #if defined(PETSC_HAVE_SAWS)
941: #include <petscviewersaws.h>
942: #endif
944: /*
945: If thread safety is on, extraneous matrices may be printed
947: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix, which is passed into MatViewFromOptions()
948: */
949: #if !defined(PETSC_HAVE_THREADSAFETY)
950: static PetscInt insidematview = 0;
951: #endif
953: /*@C
954: MatViewFromOptions - View properties of the matrix based on options set in the options database
956: Collective
958: Input Parameters:
959: + A - the matrix
960: . obj - optional additional object that provides the options prefix to use
961: - name - command line option
963: Options Database Key:
964: . -mat_view [viewertype]:... - the viewer and its options
966: Level: intermediate
968: Note:
969: .vb
970: If no value is provided ascii:stdout is used
971: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
972: for example ascii::ascii_info prints just the information about the object not all details
973: unless :append is given filename opens in write mode, overwriting what was already there
974: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
975: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
976: socket[:port] defaults to the standard output port
977: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
978: .ve
980: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
981: @*/
982: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
983: {
984: PetscFunctionBegin;
986: #if !defined(PETSC_HAVE_THREADSAFETY)
987: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
988: #endif
989: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
990: PetscFunctionReturn(PETSC_SUCCESS);
991: }
993: /*@C
994: MatView - display information about a matrix in a variety of ways
996: Collective
998: Input Parameters:
999: + mat - the matrix
1000: - viewer - visualization context
1002: Options Database Keys:
1003: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1004: . -mat_view ::ascii_info_detail - Prints more detailed info
1005: . -mat_view - Prints matrix in ASCII format
1006: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1007: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1008: . -display <name> - Sets display name (default is host)
1009: . -draw_pause <sec> - Sets number of seconds to pause after display
1010: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1011: . -viewer_socket_machine <machine> - -
1012: . -viewer_socket_port <port> - -
1013: . -mat_view binary - save matrix to file in binary format
1014: - -viewer_binary_filename <name> - -
1016: Level: beginner
1018: Notes:
1019: The available visualization contexts include
1020: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1021: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1022: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1023: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1025: The user can open alternative visualization contexts with
1026: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1027: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a
1028: specified file; corresponding input uses `MatLoad()`
1029: . `PetscViewerDrawOpen()` - Outputs nonzero matrix structure to
1030: an X window display
1031: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer.
1032: Currently only the `MATSEQDENSE` and `MATAIJ`
1033: matrix types support the Socket viewer.
1035: The user can call `PetscViewerPushFormat()` to specify the output
1036: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1037: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1038: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1039: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1040: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1041: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse
1042: format common among all matrix types
1043: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific
1044: format (which is in many cases the same as the default)
1045: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix
1046: size and structure (not the matrix entries)
1047: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about
1048: the matrix structure
1050: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1051: for larger matrices the program will seemingly hang and take hours, so one should use the binary format instead.
1053: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1055: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1056: viewer is used.
1058: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1059: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1061: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1062: and then use the following mouse functions.
1063: .vb
1064: left mouse: zoom in
1065: middle mouse: zoom out
1066: right mouse: continue with the simulation
1067: .ve
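Example Usage:
A minimal sketch writing the matrix in MATLAB format to a file (the filename "A.m" is illustrative):
.vb
PetscViewer viewer;
PetscViewerASCIIOpen(PETSC_COMM_WORLD, "A.m", &viewer);
PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
MatView(A, viewer);
PetscViewerPopFormat(viewer);
PetscViewerDestroy(&viewer);
.ve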
1069: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1070: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1071: @*/
1072: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1073: {
1074: PetscInt rows, cols, rbs, cbs;
1075: PetscBool isascii, isstring, issaws;
1076: PetscViewerFormat format;
1077: PetscMPIInt size;
1079: PetscFunctionBegin;
1082: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1084: PetscCheckSameComm(mat, 1, viewer, 2);
1086: PetscCall(PetscViewerGetFormat(viewer, &format));
1087: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
1088: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1090: #if !defined(PETSC_HAVE_THREADSAFETY)
1091: insidematview++;
1092: #endif
1093: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1094: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1095: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1096: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1098: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1099: if (isascii) {
1100: if (!mat->preallocated) {
1101: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1102: #if !defined(PETSC_HAVE_THREADSAFETY)
1103: insidematview--;
1104: #endif
1105: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1106: PetscFunctionReturn(PETSC_SUCCESS);
1107: }
1108: if (!mat->assembled) {
1109: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1110: #if !defined(PETSC_HAVE_THREADSAFETY)
1111: insidematview--;
1112: #endif
1113: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1114: PetscFunctionReturn(PETSC_SUCCESS);
1115: }
1116: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1117: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1118: MatNullSpace nullsp, transnullsp;
1120: PetscCall(PetscViewerASCIIPushTab(viewer));
1121: PetscCall(MatGetSize(mat, &rows, &cols));
1122: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1123: if (rbs != 1 || cbs != 1) {
1124: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "\n", rows, cols, rbs, cbs));
1125: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "\n", rows, cols, rbs));
1126: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1127: if (mat->factortype) {
1128: MatSolverType solver;
1129: PetscCall(MatFactorGetSolverType(mat, &solver));
1130: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1131: }
1132: if (mat->ops->getinfo) {
1133: MatInfo info;
1134: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1135: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1136: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1137: }
1138: PetscCall(MatGetNullSpace(mat, &nullsp));
1139: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1140: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1141: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1142: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1143: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1144: PetscCall(PetscViewerASCIIPushTab(viewer));
1145: PetscCall(MatProductView(mat, viewer));
1146: PetscCall(PetscViewerASCIIPopTab(viewer));
1147: }
1148: } else if (issaws) {
1149: #if defined(PETSC_HAVE_SAWS)
1150: PetscMPIInt rank;
1152: PetscCall(PetscObjectName((PetscObject)mat));
1153: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1154: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1155: #endif
1156: } else if (isstring) {
1157: const char *type;
1158: PetscCall(MatGetType(mat, &type));
1159: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1160: PetscTryTypeMethod(mat, view, viewer);
1161: }
1162: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1163: PetscCall(PetscViewerASCIIPushTab(viewer));
1164: PetscUseTypeMethod(mat, viewnative, viewer);
1165: PetscCall(PetscViewerASCIIPopTab(viewer));
1166: } else if (mat->ops->view) {
1167: PetscCall(PetscViewerASCIIPushTab(viewer));
1168: PetscUseTypeMethod(mat, view, viewer);
1169: PetscCall(PetscViewerASCIIPopTab(viewer));
1170: }
1171: if (isascii) {
1172: PetscCall(PetscViewerGetFormat(viewer, &format));
1173: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1174: }
1175: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1176: #if !defined(PETSC_HAVE_THREADSAFETY)
1177: insidematview--;
1178: #endif
1179: PetscFunctionReturn(PETSC_SUCCESS);
1180: }
1182: #if defined(PETSC_USE_DEBUG)
1183: #include <../src/sys/totalview/tv_data_display.h>
1184: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1185: {
1186: TV_add_row("Local rows", "int", &mat->rmap->n);
1187: TV_add_row("Local columns", "int", &mat->cmap->n);
1188: TV_add_row("Global rows", "int", &mat->rmap->N);
1189: TV_add_row("Global columns", "int", &mat->cmap->N);
1190: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1191: return TV_format_OK;
1192: }
1193: #endif
1195: /*@C
1196: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1197: with `MatView()`. The matrix format is determined from the options database.
1198: Generates a parallel MPI matrix if the communicator has more than one
1199: processor. The default matrix type is `MATAIJ`.
1201: Collective
1203: Input Parameters:
1204: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1205: or some related function before a call to `MatLoad()`
1206: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1208: Options Database Key:
1209: . -matload_block_size <bs> - set block size, used with block matrix formats (`MATSEQBAIJ`, ...) to specify the block size
1213: Level: beginner
1215: Notes:
1216: If the `Mat` type has not yet been given then `MATAIJ` is used; call `MatSetFromOptions()` on the
1217: `Mat` before calling this routine if you wish to set it from the options database.
1219: `MatLoad()` automatically loads into the options database any options
1220: given in the file filename.info where filename is the name of the file
1221: that was passed to `PetscViewerBinaryOpen()`. The options in the info
1222: file will be ignored if you use the -viewer_binary_skip_info option.
1224: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1225: sets the default matrix type AIJ and sets the local and global sizes.
1226: If type and/or size is already set, then the same are used.
1228: In parallel, each processor can load a subset of rows (or the
1229: entire matrix). This routine is especially useful when a large
1230: matrix is stored on disk and only part of it is desired on each
1231: processor. For example, a parallel solver may access only some of
1232: the rows from each processor. The algorithm used here reads
1233: relatively small blocks of data rather than reading the entire
1234: matrix and then subsetting it.
1236: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1237: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1238: or the sequence like
1239: .vb
1240: `PetscViewer` v;
1241: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1242: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1243: `PetscViewerSetFromOptions`(v);
1244: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1245: `PetscViewerFileSetName`(v,"datafile");
1246: .ve
1247: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1248: $ -viewer_type {binary, hdf5}
1250: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1251: and src/mat/tutorials/ex10.c with the second approach.
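A complete minimal sketch using a binary viewer (the filename "matrix.dat" is illustrative):
.vb
Mat         A;
PetscViewer viewer;
PetscViewerBinaryOpen(PETSC_COMM_WORLD, "matrix.dat", FILE_MODE_READ, &viewer);
MatCreate(PETSC_COMM_WORLD, &A);
MatSetFromOptions(A);
MatLoad(A, viewer);
PetscViewerDestroy(&viewer);
.ve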
1253: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1254: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1255: Multiple objects, both matrices and vectors, can be stored within the same file.
1256: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1258: Most users should not need to know the details of the binary storage
1259: format, since `MatLoad()` and `MatView()` completely hide these details.
1260: But for anyone who is interested, the standard binary matrix storage
1261: format is
1263: .vb
1264: PetscInt MAT_FILE_CLASSID
1265: PetscInt number of rows
1266: PetscInt number of columns
1267: PetscInt total number of nonzeros
1268: PetscInt *number nonzeros in each row
1269: PetscInt *column indices of all nonzeros (starting index is zero)
1270: PetscScalar *values of all nonzeros
1271: .ve
1272: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1273: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1274: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1276: PETSc automatically does the byte swapping for
1277: machines that store the bytes reversed. Thus if you write your own binary
1278: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1279: and `PetscBinaryWrite()` to see how this may be done.
1281: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1282: Each processor's chunk is loaded independently by its owning MPI process.
1283: Multiple objects, both matrices and vectors, can be stored within the same file.
1284: They are looked up by their PetscObject name.
1286: As the MATLAB MAT-File Version 7.3 format is also a HDF5 flavor, we decided to use
1287: by default the same structure and naming of the AIJ arrays and column count
1288: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1289: $ save example.mat A b -v7.3
1290: can be directly read by this routine (see Reference 1 for details).
1292: Depending on your MATLAB version, this format might be a default,
1293: otherwise you can set it as default in Preferences.
1295: Unless the -nocompression flag is used to save the file in MATLAB,
1296: PETSc must be configured with the ZLIB package.
1298: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1300: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1302: Corresponding `MatView()` is not yet implemented.
1304: The loaded matrix is actually a transpose of the original one in MATLAB,
1305: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1306: With this format, the matrix is automatically transposed by PETSc,
1307: unless the matrix is marked as SPD or symmetric
1308: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1310: References:
1311: . * - MATLAB(R) Documentation, manual page of save(), https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version
1313: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1314: @*/
1315: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1316: {
1317: PetscBool flg;
1319: PetscFunctionBegin;
1323: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1325: flg = PETSC_FALSE;
1326: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1327: if (flg) {
1328: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1329: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1330: }
1331: flg = PETSC_FALSE;
1332: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1333: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1335: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1336: PetscUseTypeMethod(mat, load, viewer);
1337: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1338: PetscFunctionReturn(PETSC_SUCCESS);
1339: }
1341: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1342: {
1343: Mat_Redundant *redund = *redundant;
1345: PetscFunctionBegin;
1346: if (redund) {
1347: if (redund->matseq) { /* via MatCreateSubMatrices() */
1348: PetscCall(ISDestroy(&redund->isrow));
1349: PetscCall(ISDestroy(&redund->iscol));
1350: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1351: } else {
1352: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1353: PetscCall(PetscFree(redund->sbuf_j));
1354: PetscCall(PetscFree(redund->sbuf_a));
1355: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1356: PetscCall(PetscFree(redund->rbuf_j[i]));
1357: PetscCall(PetscFree(redund->rbuf_a[i]));
1358: }
1359: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1360: }
1362: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1363: PetscCall(PetscFree(redund));
1364: }
1365: PetscFunctionReturn(PETSC_SUCCESS);
1366: }
1368: /*@C
1369: MatDestroy - Frees space taken by a matrix.
1371: Collective
1373: Input Parameter:
1374: . A - the matrix
1376: Level: beginner
1378: Developer Note:
1379: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1380: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1381: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1382: if changes are needed here.
1384: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1385: @*/
1386: PetscErrorCode MatDestroy(Mat *A)
1387: {
1388: PetscFunctionBegin;
1389: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1391: if (--((PetscObject)(*A))->refct > 0) {
1392: *A = NULL;
1393: PetscFunctionReturn(PETSC_SUCCESS);
1394: }
1396: /* if memory was published with SAWs then destroy it */
1397: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1398: PetscTryTypeMethod((*A), destroy);
1400: PetscCall(PetscFree((*A)->factorprefix));
1401: PetscCall(PetscFree((*A)->defaultvectype));
1402: PetscCall(PetscFree((*A)->defaultrandtype));
1403: PetscCall(PetscFree((*A)->bsizes));
1404: PetscCall(PetscFree((*A)->solvertype));
1405: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1406: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1407: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1408: PetscCall(MatProductClear(*A));
1409: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1410: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1411: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1412: PetscCall(MatDestroy(&(*A)->schur));
1413: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1414: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1415: PetscCall(PetscHeaderDestroy(A));
1416: PetscFunctionReturn(PETSC_SUCCESS);
1417: }
1419: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1420: /*@C
1421: MatSetValues - Inserts or adds a block of values into a matrix.
1422: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1423: MUST be called after all calls to `MatSetValues()` have been completed.
1425: Not Collective
1427: Input Parameters:
1428: + mat - the matrix
1429: . v - a logically two-dimensional array of values
1430: . m - the number of rows
1431: . idxm - the global indices of the rows
1432: . n - the number of columns
1433: . idxn - the global indices of the columns
1434: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1436: Level: beginner
1438: Notes:
1439: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1441: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1442: options cannot be mixed without intervening calls to the assembly
1443: routines.
1445: `MatSetValues()` uses 0-based row and column numbers in Fortran
1446: as well as in C.
1448: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1449: simply ignored. This allows easily inserting element stiffness matrices
1450: with homogeneous Dirichlet boundary conditions that you don't want represented
1451: in the matrix.
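Example Usage:
A minimal sketch inserting a 2x2 block of values (assumes an existing matrix `A` with global sizes of at least 2):
.vb
PetscInt    rows[2] = {0, 1}, cols[2] = {0, 1};
PetscScalar v[4]    = {1.0, 2.0, 3.0, 4.0};   /* row-oriented by default */
MatSetValues(A, 2, rows, 2, cols, v, INSERT_VALUES);
MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
.ve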
1453: Efficiency Alert:
1454: The routine `MatSetValuesBlocked()` may offer much better efficiency
1455: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1457: Developer Note:
1458: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1459: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1461: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1462: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1463: @*/
1464: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1465: {
1466: PetscFunctionBeginHot;
1469: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1470: PetscAssertPointer(idxm, 3);
1471: PetscAssertPointer(idxn, 5);
1472: MatCheckPreallocated(mat, 1);
1474: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1475: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1477: if (PetscDefined(USE_DEBUG)) {
1478: PetscInt i, j;
1480: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1481: for (i = 0; i < m; i++) {
1482: for (j = 0; j < n; j++) {
1483: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1484: #if defined(PETSC_USE_COMPLEX)
1485: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1486: #else
1487: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1488: #endif
1489: }
1490: }
1491: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1492: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1493: }
1495: if (mat->assembled) {
1496: mat->was_assembled = PETSC_TRUE;
1497: mat->assembled = PETSC_FALSE;
1498: }
1499: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1500: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1501: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1502: PetscFunctionReturn(PETSC_SUCCESS);
1503: }
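/*
  Editorial sketch (not part of the PETSc source): a minimal example of assembling a
  tridiagonal MATAIJ matrix with MatSetValues(), also showing that negative column
  indices are silently ignored. The function name and matrix size are illustrative.
*/
static PetscErrorCode MatSetValuesTridiagonalExample_Editorial(MPI_Comm comm, PetscInt n, Mat *Aout)
{
  Mat         A;
  PetscInt    i, rstart, rend, col[3];
  PetscScalar v[3] = {-1.0, 2.0, -1.0};

  PetscFunctionBegin;
  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  for (i = rstart; i < rend; i++) {
    col[0] = i - 1;                    /* negative for the first row, so it is ignored */
    col[1] = i;
    col[2] = (i + 1 < n) ? i + 1 : -1; /* negative for the last row, so it is ignored */
    PetscCall(MatSetValues(A, 1, &i, 3, col, v, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  *Aout = A;
  PetscFunctionReturn(PETSC_SUCCESS);
}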
1505: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1506: /*@C
1507: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns.
1508: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1509: MUST be called after all calls to `MatSetValues()` have been completed.
1511: Not Collective
1513: Input Parameters:
1514: + mat - the matrix
1515: . v - a logically two-dimensional array of values
1516: . ism - the rows to provide
1517: . isn - the columns to provide
1518: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1520: Level: beginner
1522: Notes:
1523: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1525: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1526: options cannot be mixed without intervening calls to the assembly
1527: routines.
1529: `MatSetValues()` uses 0-based row and column numbers in Fortran
1530: as well as in C.
1532: Negative indices may be passed in `ism` and `isn`; these rows and columns are
1533: simply ignored. This allows easily inserting element stiffness matrices
1534: with homogeneous Dirichlet boundary conditions that you don't want represented
1535: in the matrix.
1537: Efficiency Alert:
1538: The routine `MatSetValuesBlocked()` may offer much better efficiency
1539: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1541: This is currently not optimized for any particular `ISType`
1543: Developer Note:
1544: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1545: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1547: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1548: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1549: @*/
1550: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1551: {
1552: PetscInt m, n;
1553: const PetscInt *rows, *cols;
1555: PetscFunctionBeginHot;
1557: PetscCall(ISGetIndices(ism, &rows));
1558: PetscCall(ISGetIndices(isn, &cols));
1559: PetscCall(ISGetLocalSize(ism, &m));
1560: PetscCall(ISGetLocalSize(isn, &n));
1561: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1562: PetscCall(ISRestoreIndices(ism, &rows));
1563: PetscCall(ISRestoreIndices(isn, &cols));
1564: PetscFunctionReturn(PETSC_SUCCESS);
1565: }
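/*
  Editorial sketch (not part of the PETSc source): inserting a small dense block of values
  through index sets with MatSetValuesIS(). The indices and values are illustrative and assume
  the addressed rows are owned appropriately.
*/
static PetscErrorCode MatSetValuesISExample_Editorial(Mat A)
{
  IS          is;
  PetscInt    idx[2] = {0, 1};
  PetscScalar v[4]   = {4.0, -1.0, -1.0, 4.0}; /* row-oriented 2x2 block of values */

  PetscFunctionBegin;
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, idx, PETSC_COPY_VALUES, &is));
  PetscCall(MatSetValuesIS(A, is, is, v, ADD_VALUES)); /* same IS for rows and columns */
  PetscCall(ISDestroy(&is));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}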
1567: /*@
1568: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1569: values into a matrix
1571: Not Collective
1573: Input Parameters:
1574: + mat - the matrix
1575: . row - the (block) row to set
1576: - v - a logically two-dimensional array of values
1578: Level: intermediate
1580: Notes:
1581: The values, `v`, are column-oriented (for the block version) and sorted
1583: All the nonzero values in `row` must be provided
1585: The matrix must have previously had its column indices set, likely by having been assembled.
1587: `row` must belong to this MPI process
1589: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1590: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1591: @*/
1592: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1593: {
1594: PetscInt globalrow;
1596: PetscFunctionBegin;
1599: PetscAssertPointer(v, 3);
1600: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1601: PetscCall(MatSetValuesRow(mat, globalrow, v));
1602: PetscFunctionReturn(PETSC_SUCCESS);
1603: }
1605: /*@
1606: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1607: values into a matrix
1609: Not Collective
1611: Input Parameters:
1612: + mat - the matrix
1613: . row - the (block) row to set
1614: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1616: Level: advanced
1618: Notes:
1619: The values, `v`, are column-oriented for the block version.
1621: All the nonzeros in `row` must be provided
1623: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED; usually `MatSetValues()` is used instead.
1625: `row` must belong to this process
1627: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1628: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1629: @*/
1630: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1631: {
1632: PetscFunctionBeginHot;
1635: MatCheckPreallocated(mat, 1);
1636: PetscAssertPointer(v, 3);
1637: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1638: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1639: mat->insertmode = INSERT_VALUES;
1641: if (mat->assembled) {
1642: mat->was_assembled = PETSC_TRUE;
1643: mat->assembled = PETSC_FALSE;
1644: }
1645: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1646: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1647: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1648: PetscFunctionReturn(PETSC_SUCCESS);
1649: }
1651: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1652: /*@
1653: MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1654: using structured grid indexing
1656: Not Collective
1658: Input Parameters:
1659: + mat - the matrix
1660: . m - number of rows being entered
1661: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1662: . n - number of columns being entered
1663: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1664: . v - a logically two-dimensional array of values
1665: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1667: Level: beginner
1669: Notes:
1670: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1672: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1673: options cannot be mixed without intervening calls to the assembly
1674: routines.
1676: The grid coordinates are across the entire grid, not just the local portion
1678: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1679: as well as in C.
1681: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1683: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1684: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1686: The columns and rows in the stencil passed in MUST be contained within the
1687: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1688: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1689: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1690: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1692: For periodic boundary conditions, use negative indices for values to the left of the first entry (below 0); these
1693: are obtained by wrapping values from the right edge. For values to the right of the last entry, use that index plus one,
1694: and so on; these are obtained by wrapping values from the left edge. This does not work for anything but the
1695: `DM_BOUNDARY_PERIODIC` boundary type.
1697: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1698: a single value per point) you can skip filling those indices.
1700: Inspired by the structured grid interface to the HYPRE package
1701: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1703: Efficiency Alert:
1704: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1705: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1707: Fortran Note:
1708: `idxm` and `idxn` should be declared as
1709: $ MatStencil idxm(4,m),idxn(4,n)
1710: and the values inserted using
1711: .vb
1712: idxm(MatStencil_i,1) = i
1713: idxm(MatStencil_j,1) = j
1714: idxm(MatStencil_k,1) = k
1715: idxm(MatStencil_c,1) = c
1716: etc
1717: .ve
1719: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1720: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1721: @*/
1722: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1723: {
1724: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1725: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1726: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1728: PetscFunctionBegin;
1729: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1732: PetscAssertPointer(idxm, 3);
1733: PetscAssertPointer(idxn, 5);
1735: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1736: jdxm = buf;
1737: jdxn = buf + m;
1738: } else {
1739: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1740: jdxm = bufm;
1741: jdxn = bufn;
1742: }
1743: for (i = 0; i < m; i++) {
1744: for (j = 0; j < 3 - sdim; j++) dxm++;
1745: tmp = *dxm++ - starts[0];
1746: for (j = 0; j < dim - 1; j++) {
1747: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1748: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1749: }
1750: if (mat->stencil.noc) dxm++;
1751: jdxm[i] = tmp;
1752: }
1753: for (i = 0; i < n; i++) {
1754: for (j = 0; j < 3 - sdim; j++) dxn++;
1755: tmp = *dxn++ - starts[0];
1756: for (j = 0; j < dim - 1; j++) {
1757: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1758: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1759: }
1760: if (mat->stencil.noc) dxn++;
1761: jdxn[i] = tmp;
1762: }
1763: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1764: PetscCall(PetscFree2(bufm, bufn));
1765: PetscFunctionReturn(PETSC_SUCCESS);
1766: }
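/*
  Editorial sketch (not part of the PETSc source): filling a 5-point Laplacian on a 2d DMDA with
  MatSetValuesStencil(), in the spirit of the KSP tutorials. Assumes petscdmda.h is included;
  the function name and grid dimensions are illustrative.
*/
static PetscErrorCode MatSetValuesStencilLaplacianExample_Editorial(MPI_Comm comm, Mat *Aout)
{
  DM          da;
  Mat         A;
  PetscInt    i, j, xs, ys, xm, ym, Mx = 8, My = 8;
  MatStencil  row, col[5];
  PetscScalar v[5];

  PetscFunctionBegin;
  PetscCall(DMDACreate2d(comm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, Mx, My, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMCreateMatrix(da, &A)); /* sets the stencil and local-to-global mapping for us */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      row.i = i;
      row.j = j;
      if (i == 0 || j == 0 || i == Mx - 1 || j == My - 1) { /* boundary rows: identity */
        v[0] = 1.0;
        PetscCall(MatSetValuesStencil(A, 1, &row, 1, &row, v, INSERT_VALUES));
      } else {
        col[0].i = i;     col[0].j = j - 1; v[0] = -1.0;
        col[1].i = i - 1; col[1].j = j;     v[1] = -1.0;
        col[2].i = i;     col[2].j = j;     v[2] =  4.0;
        col[3].i = i + 1; col[3].j = j;     v[3] = -1.0;
        col[4].i = i;     col[4].j = j + 1; v[4] = -1.0;
        PetscCall(MatSetValuesStencil(A, 1, &row, 5, col, v, INSERT_VALUES));
      }
    }
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(DMDestroy(&da));
  *Aout = A;
  PetscFunctionReturn(PETSC_SUCCESS);
}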
1768: /*@
1769: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1770: using structured grid indexing
1772: Not Collective
1774: Input Parameters:
1775: + mat - the matrix
1776: . m - number of rows being entered
1777: . idxm - grid coordinates for matrix rows being entered
1778: . n - number of columns being entered
1779: . idxn - grid coordinates for matrix columns being entered
1780: . v - a logically two-dimensional array of values
1781: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1783: Level: beginner
1785: Notes:
1786: By default the values, `v`, are row-oriented and unsorted.
1787: See `MatSetOption()` for other options.
1789: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1790: options cannot be mixed without intervening calls to the assembly
1791: routines.
1793: The grid coordinates are across the entire grid, not just the local portion
1795: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1796: as well as in C.
1798: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1800: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1801: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1803: The columns and rows in the stencil passed in MUST be contained within the
1804: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1805: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1806: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1807: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1809: Negative indices may be passed in idxm and idxn; these rows and columns are
1810: simply ignored. This allows easily inserting element stiffness matrices
1811: with homogeneous Dirichlet boundary conditions that you don't want represented
1812: in the matrix.
1814: Inspired by the structured grid interface to the HYPRE package
1815: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1817: Fortran Note:
1818: `idxm` and `idxn` should be declared as
1819: $ MatStencil idxm(4,m),idxn(4,n)
1820: and the values inserted using
1821: .vb
1822: idxm(MatStencil_i,1) = i
1823: idxm(MatStencil_j,1) = j
1824: idxm(MatStencil_k,1) = k
1825: etc
1826: .ve
1828: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1829: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1830: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1831: @*/
1832: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1833: {
1834: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1835: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1836: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1838: PetscFunctionBegin;
1839: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1842: PetscAssertPointer(idxm, 3);
1843: PetscAssertPointer(idxn, 5);
1844: PetscAssertPointer(v, 6);
1846: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1847: jdxm = buf;
1848: jdxn = buf + m;
1849: } else {
1850: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1851: jdxm = bufm;
1852: jdxn = bufn;
1853: }
1854: for (i = 0; i < m; i++) {
1855: for (j = 0; j < 3 - sdim; j++) dxm++;
1856: tmp = *dxm++ - starts[0];
1857: for (j = 0; j < sdim - 1; j++) {
1858: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1859: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1860: }
1861: dxm++;
1862: jdxm[i] = tmp;
1863: }
1864: for (i = 0; i < n; i++) {
1865: for (j = 0; j < 3 - sdim; j++) dxn++;
1866: tmp = *dxn++ - starts[0];
1867: for (j = 0; j < sdim - 1; j++) {
1868: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1869: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1870: }
1871: dxn++;
1872: jdxn[i] = tmp;
1873: }
1874: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1875: PetscCall(PetscFree2(bufm, bufn));
1876: PetscFunctionReturn(PETSC_SUCCESS);
1877: }
1879: /*@
1880: MatSetStencil - Sets the grid information for setting values into a matrix via
1881: `MatSetValuesStencil()`
1883: Not Collective
1885: Input Parameters:
1886: + mat - the matrix
1887: . dim - dimension of the grid (1, 2, or 3)
1888: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1889: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1890: - dof - number of degrees of freedom per node
1892: Level: beginner
1894: Notes:
1895: Inspired by the structured grid interface to the HYPRE package
1896: (www.llnl.gov/CASC/hyper)
1898: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1899: user.
1901: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1902: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1903: @*/
1904: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1905: {
1906: PetscFunctionBegin;
1908: PetscAssertPointer(dims, 3);
1909: PetscAssertPointer(starts, 4);
1911: mat->stencil.dim = dim + (dof > 1);
1912: for (PetscInt i = 0; i < dim; i++) {
1913: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1914: mat->stencil.starts[i] = starts[dim - i - 1];
1915: }
1916: mat->stencil.dims[dim] = dof;
1917: mat->stencil.starts[dim] = 0;
1918: mat->stencil.noc = (PetscBool)(dof == 1);
1919: PetscFunctionReturn(PETSC_SUCCESS);
1920: }
1922: /*@C
1923: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1925: Not Collective
1927: Input Parameters:
1928: + mat - the matrix
1929: . v - a logically two-dimensional array of values
1930: . m - the number of block rows
1931: . idxm - the global block indices
1932: . n - the number of block columns
1933: . idxn - the global block indices
1934: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1936: Level: intermediate
1938: Notes:
1939: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
1940: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
1942: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
1943: NOT the total number of rows/columns; for example, if the block size is 2 and
1944: you are passing in values for rows 2,3,4,5 then m would be 2 (not 4).
1945: The values in idxm would be 1 2; that is the first index for each block divided by
1946: the block size.
1948: You must call `MatSetBlockSize()` when constructing this matrix (before
1949: preallocating it).
1951: By default the values, `v`, are row-oriented, so the layout of
1952: `v` is the same as for `MatSetValues()`. See `MatSetOption()` for other options.
1954: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
1955: options cannot be mixed without intervening calls to the assembly
1956: routines.
1958: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
1959: as well as in C.
1961: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1962: simply ignored. This allows easily inserting element stiffness matrices
1963: with homogeneous Dirichlet boundary conditions that you don't want represented
1964: in the matrix.
1966: Each time an entry is set within a sparse matrix via `MatSetValues()`,
1967: internal searching must be done to determine where to place the
1968: data in the matrix storage space. By instead inserting blocks of
1969: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
1970: reduced.
1972: Example:
1973: .vb
1974: Suppose m=n=2 and block size (bs) = 2. The array is
1976: 1 2 | 3 4
1977: 5 6 | 7 8
1978: - - - | - - -
1979: 9 10 | 11 12
1980: 13 14 | 15 16
1982: v[] should be passed in like
1983: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
1985: If you are not using row oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
1986: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
1987: .ve
1989: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
1990: @*/
1991: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1992: {
1993: PetscFunctionBeginHot;
1996: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1997: PetscAssertPointer(idxm, 3);
1998: PetscAssertPointer(idxn, 5);
1999: MatCheckPreallocated(mat, 1);
2000: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2001: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2002: if (PetscDefined(USE_DEBUG)) {
2003: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2004: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2005: }
2006: if (PetscDefined(USE_DEBUG)) {
2007: PetscInt rbs, cbs, M, N, i;
2008: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2009: PetscCall(MatGetSize(mat, &M, &N));
2010: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block index %" PetscInt_FMT " (index %" PetscInt_FMT ") greater than row length %" PetscInt_FMT, i, idxm[i], M);
2011: for (i = 0; i < n; i++) PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block index %" PetscInt_FMT " (index %" PetscInt_FMT ") greater than column length %" PetscInt_FMT, i, idxn[i], N);
2012: }
2013: if (mat->assembled) {
2014: mat->was_assembled = PETSC_TRUE;
2015: mat->assembled = PETSC_FALSE;
2016: }
2017: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2018: if (mat->ops->setvaluesblocked) {
2019: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2020: } else {
2021: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2022: PetscInt i, j, bs, cbs;
2024: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2025: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2026: iidxm = buf;
2027: iidxn = buf + m * bs;
2028: } else {
2029: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2030: iidxm = bufr;
2031: iidxn = bufc;
2032: }
2033: for (i = 0; i < m; i++) {
2034: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2035: }
2036: if (m != n || bs != cbs || idxm != idxn) {
2037: for (i = 0; i < n; i++) {
2038: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2039: }
2040: } else iidxn = iidxm;
2041: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2042: PetscCall(PetscFree2(bufr, bufc));
2043: }
2044: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2045: PetscFunctionReturn(PETSC_SUCCESS);
2046: }
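/*
  Editorial sketch (not part of the PETSc source): inserting one 2x2 block with
  MatSetValuesBlocked() into a sequential MATSEQBAIJ matrix whose block size was fixed at
  creation time. The sizes and values are illustrative.
*/
static PetscErrorCode MatSetValuesBlockedExample_Editorial(Mat *Aout)
{
  Mat         A;
  PetscInt    ib = 1, jb = 0;              /* block row 1 = scalar rows 2-3, block column 0 = scalar columns 0-1 */
  PetscScalar v[4] = {1.0, 2.0, 3.0, 4.0}; /* row-oriented by default: [1 2; 3 4] */

  PetscFunctionBegin;
  PetscCall(MatCreateSeqBAIJ(PETSC_COMM_SELF, 2, 8, 8, 3, NULL, &A)); /* bs = 2, 8x8 matrix, 3 block nonzeros per block row */
  PetscCall(MatSetValuesBlocked(A, 1, &ib, 1, &jb, v, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  *Aout = A;
  PetscFunctionReturn(PETSC_SUCCESS);
}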
2048: /*@C
2049: MatGetValues - Gets a block of local values from a matrix.
2051: Not Collective; can only return values that are owned by the given process
2053: Input Parameters:
2054: + mat - the matrix
2055: . v - a logically two-dimensional array for storing the values
2056: . m - the number of rows
2057: . idxm - the global indices of the rows
2058: . n - the number of columns
2059: - idxn - the global indices of the columns
2061: Level: advanced
2063: Notes:
2064: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2065: The values, `v`, are then returned in a row-oriented format,
2066: analogous to that used by default in `MatSetValues()`.
2068: `MatGetValues()` uses 0-based row and column numbers in
2069: Fortran as well as in C.
2071: `MatGetValues()` requires that the matrix has been assembled
2072: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2073: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2074: without intermediate matrix assembly.
2076: Negative row or column indices will be ignored and those locations in `v` will be
2077: left unchanged.
2079: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2080: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2081: from `MatGetOwnershipRange`(mat,&rstart,&rend).
2083: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2084: @*/
2085: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2086: {
2087: PetscFunctionBegin;
2090: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2091: PetscAssertPointer(idxm, 3);
2092: PetscAssertPointer(idxn, 5);
2093: PetscAssertPointer(v, 6);
2094: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2095: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2096: MatCheckPreallocated(mat, 1);
2098: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2099: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2100: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2101: PetscFunctionReturn(PETSC_SUCCESS);
2102: }
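/*
  Editorial sketch (not part of the PETSc source): retrieving a 2x2 set of locally owned
  entries from an assembled matrix with MatGetValues(). Assumes the local row range holds at
  least two rows; the indices are illustrative.
*/
static PetscErrorCode MatGetValuesExample_Editorial(Mat A)
{
  PetscInt    rstart, rend, rows[2], cols[2];
  PetscScalar vals[4]; /* row-oriented: vals[i*2 + j] = A(rows[i], cols[j]) */

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  rows[0] = rstart;
  rows[1] = rstart + 1;
  cols[0] = rstart;
  cols[1] = rstart + 1;
  PetscCall(MatGetValues(A, 2, rows, 2, cols, vals));
  PetscFunctionReturn(PETSC_SUCCESS);
}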
2104: /*@C
2105: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2106: defined previously by `MatSetLocalToGlobalMapping()`
2108: Not Collective
2110: Input Parameters:
2111: + mat - the matrix
2112: . nrow - number of rows
2113: . irow - the row local indices
2114: . ncol - number of columns
2115: - icol - the column local indices
2117: Output Parameter:
2118: . y - a logically two-dimensional array of values
2120: Level: advanced
2122: Notes:
2123: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2125: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2126: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2127: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2128: with `MatSetLocalToGlobalMapping()`.
2130: Developer Note:
2131: This is labelled with C so does not automatically generate Fortran stubs and interfaces
2132: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2134: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2135: `MatSetValuesLocal()`, `MatGetValues()`
2136: @*/
2137: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2138: {
2139: PetscFunctionBeginHot;
2142: MatCheckPreallocated(mat, 1);
2143: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2144: PetscAssertPointer(irow, 3);
2145: PetscAssertPointer(icol, 5);
2146: if (PetscDefined(USE_DEBUG)) {
2147: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2148: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2149: }
2150: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2151: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2152: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2153: else {
2154: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2155: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2156: irowm = buf;
2157: icolm = buf + nrow;
2158: } else {
2159: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2160: irowm = bufr;
2161: icolm = bufc;
2162: }
2163: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2164: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2165: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2166: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2167: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2168: PetscCall(PetscFree2(bufr, bufc));
2169: }
2170: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2171: PetscFunctionReturn(PETSC_SUCCESS);
2172: }
2174: /*@
2175: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2176: the same size. Currently, this can only be called once and creates the given matrix.
2178: Not Collective
2180: Input Parameters:
2181: + mat - the matrix
2182: . nb - the number of blocks
2183: . bs - the number of rows (and columns) in each block
2184: . rows - a concatenation of the rows for each block
2185: - v - a concatenation of logically two-dimensional arrays of values
2187: Level: advanced
2189: Notes:
2190: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2192: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
2194: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2195: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2196: @*/
2197: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2198: {
2199: PetscFunctionBegin;
2202: PetscAssertPointer(rows, 4);
2203: PetscAssertPointer(v, 5);
2204: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2206: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2207: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2208: else {
2209: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2210: }
2211: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2212: PetscFunctionReturn(PETSC_SUCCESS);
2213: }
2215: /*@
2216: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2217: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2218: using a local (per-processor) numbering.
2220: Not Collective
2222: Input Parameters:
2223: + x - the matrix
2224: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2225: - cmapping - column mapping
2227: Level: intermediate
2229: Note:
2230: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
2232: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2233: @*/
2234: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2235: {
2236: PetscFunctionBegin;
2241: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2242: else {
2243: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2244: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2245: }
2246: PetscFunctionReturn(PETSC_SUCCESS);
2247: }
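/*
  Editorial sketch (not part of the PETSc source): creating an ISLocalToGlobalMapping from an
  array of global indices and attaching it to a matrix so that MatSetValuesLocal() can be used.
  The array contents are supplied by the caller; names are illustrative.
*/
static PetscErrorCode MatSetLocalToGlobalMappingExample_Editorial(Mat A, PetscInt nlocal, const PetscInt globals[])
{
  ISLocalToGlobalMapping l2g;

  PetscFunctionBegin;
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, nlocal, globals, PETSC_COPY_VALUES, &l2g));
  PetscCall(MatSetLocalToGlobalMapping(A, l2g, l2g)); /* use the same mapping for rows and columns */
  PetscCall(ISLocalToGlobalMappingDestroy(&l2g));
  PetscFunctionReturn(PETSC_SUCCESS);
}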
2249: /*@
2250: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2252: Not Collective
2254: Input Parameter:
2255: . A - the matrix
2257: Output Parameters:
2258: + rmapping - row mapping
2259: - cmapping - column mapping
2261: Level: advanced
2263: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2264: @*/
2265: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2266: {
2267: PetscFunctionBegin;
2270: if (rmapping) {
2271: PetscAssertPointer(rmapping, 2);
2272: *rmapping = A->rmap->mapping;
2273: }
2274: if (cmapping) {
2275: PetscAssertPointer(cmapping, 3);
2276: *cmapping = A->cmap->mapping;
2277: }
2278: PetscFunctionReturn(PETSC_SUCCESS);
2279: }
2281: /*@
2282: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2284: Logically Collective
2286: Input Parameters:
2287: + A - the matrix
2288: . rmap - row layout
2289: - cmap - column layout
2291: Level: advanced
2293: Note:
2294: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2296: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2297: @*/
2298: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2299: {
2300: PetscFunctionBegin;
2302: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2303: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2304: PetscFunctionReturn(PETSC_SUCCESS);
2305: }
2307: /*@
2308: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2310: Not Collective
2312: Input Parameter:
2313: . A - the matrix
2315: Output Parameters:
2316: + rmap - row layout
2317: - cmap - column layout
2319: Level: advanced
2321: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2322: @*/
2323: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2324: {
2325: PetscFunctionBegin;
2328: if (rmap) {
2329: PetscAssertPointer(rmap, 2);
2330: *rmap = A->rmap;
2331: }
2332: if (cmap) {
2333: PetscAssertPointer(cmap, 3);
2334: *cmap = A->cmap;
2335: }
2336: PetscFunctionReturn(PETSC_SUCCESS);
2337: }
2339: /*@C
2340: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2341: using a local numbering of the rows and columns.
2343: Not Collective
2345: Input Parameters:
2346: + mat - the matrix
2347: . nrow - number of rows
2348: . irow - the row local indices
2349: . ncol - number of columns
2350: . icol - the column local indices
2351: . y - a logically two-dimensional array of values
2352: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2354: Level: intermediate
2356: Notes:
2357: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2359: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2360: options cannot be mixed without intervening calls to the assembly
2361: routines.
2363: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2364: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
2366: Developer Note:
2367: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2368: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2370: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2371: `MatGetValuesLocal()`
2372: @*/
2373: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2374: {
2375: PetscFunctionBeginHot;
2378: MatCheckPreallocated(mat, 1);
2379: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2380: PetscAssertPointer(irow, 3);
2381: PetscAssertPointer(icol, 5);
2382: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2383: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2384: if (PetscDefined(USE_DEBUG)) {
2385: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2386: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2387: }
2389: if (mat->assembled) {
2390: mat->was_assembled = PETSC_TRUE;
2391: mat->assembled = PETSC_FALSE;
2392: }
2393: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2394: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2395: else {
2396: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2397: const PetscInt *irowm, *icolm;
2399: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2400: bufr = buf;
2401: bufc = buf + nrow;
2402: irowm = bufr;
2403: icolm = bufc;
2404: } else {
2405: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2406: irowm = bufr;
2407: icolm = bufc;
2408: }
2409: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2410: else irowm = irow;
2411: if (mat->cmap->mapping) {
2412: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2413: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2414: } else icolm = irowm;
2415: } else icolm = icol;
2416: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2417: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2418: }
2419: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2420: PetscFunctionReturn(PETSC_SUCCESS);
2421: }
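/*
  Editorial sketch (not part of the PETSc source): adding a 3x3 element contribution with
  MatSetValuesLocal() using local row/column numbers. Assumes MatSetLocalToGlobalMapping()
  has already been called on the matrix; indices and values are illustrative.
*/
static PetscErrorCode MatSetValuesLocalExample_Editorial(Mat A)
{
  PetscInt    idx[3] = {0, 1, 2};                                           /* local row and column numbers */
  PetscScalar Ke[9]  = {2.0, -1.0, -1.0, -1.0, 2.0, -1.0, -1.0, -1.0, 2.0}; /* row-oriented 3x3 element matrix */

  PetscFunctionBegin;
  PetscCall(MatSetValuesLocal(A, 3, idx, 3, idx, Ke, ADD_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}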
2423: /*@C
2424: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2425: using a local ordering of the nodes a block at a time.
2427: Not Collective
2429: Input Parameters:
2430: + mat - the matrix
2431: . nrow - number of rows
2432: . irow - the row local indices
2433: . ncol - number of columns
2434: . icol - the column local indices
2435: . y - a logically two-dimensional array of values
2436: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2438: Level: intermediate
2440: Notes:
2441: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2442: before using this routine.
2444: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2445: options cannot be mixed without intervening calls to the assembly
2446: routines.
2448: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2449: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2451: Developer Note:
2452: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2453: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2455: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2456: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2457: @*/
2458: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2459: {
2460: PetscFunctionBeginHot;
2463: MatCheckPreallocated(mat, 1);
2464: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2465: PetscAssertPointer(irow, 3);
2466: PetscAssertPointer(icol, 5);
2467: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2468: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2469: if (PetscDefined(USE_DEBUG)) {
2470: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2471: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2472: }
2474: if (mat->assembled) {
2475: mat->was_assembled = PETSC_TRUE;
2476: mat->assembled = PETSC_FALSE;
2477: }
2478: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2479: PetscInt irbs, rbs;
2480: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2481: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2482: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2483: }
2484: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2485: PetscInt icbs, cbs;
2486: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2487: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2488: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2489: }
2490: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2491: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2492: else {
2493: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2494: const PetscInt *irowm, *icolm;
2496: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2497: bufr = buf;
2498: bufc = buf + nrow;
2499: irowm = bufr;
2500: icolm = bufc;
2501: } else {
2502: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2503: irowm = bufr;
2504: icolm = bufc;
2505: }
2506: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2507: else irowm = irow;
2508: if (mat->cmap->mapping) {
2509: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2510: PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2511: } else icolm = irowm;
2512: } else icolm = icol;
2513: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2514: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2515: }
2516: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2517: PetscFunctionReturn(PETSC_SUCCESS);
2518: }
2520: /*@
2521: MatMultDiagonalBlock - Computes the matrix-vector product, $y = Dx$, where `D` is defined by the inode or block structure of the diagonal
2523: Collective
2525: Input Parameters:
2526: + mat - the matrix
2527: - x - the vector to be multiplied
2529: Output Parameter:
2530: . y - the result
2532: Level: developer
2534: Note:
2535: The vectors `x` and `y` cannot be the same. I.e., one cannot
2536: call `MatMultDiagonalBlock`(A,y,y).
2538: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2539: @*/
2540: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2541: {
2542: PetscFunctionBegin;
2548: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2549: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2550: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2551: MatCheckPreallocated(mat, 1);
2553: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2554: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2555: PetscFunctionReturn(PETSC_SUCCESS);
2556: }
2558: /*@
2559: MatMult - Computes the matrix-vector product, $y = Ax$.
2561: Neighbor-wise Collective
2563: Input Parameters:
2564: + mat - the matrix
2565: - x - the vector to be multiplied
2567: Output Parameter:
2568: . y - the result
2570: Level: beginner
2572: Note:
2573: The vectors `x` and `y` cannot be the same. I.e., one cannot
2574: call `MatMult`(A,y,y).
2576: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2577: @*/
2578: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2579: {
2580: PetscFunctionBegin;
2584: VecCheckAssembled(x);
2586: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2587: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2588: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2589: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2590: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2591: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2592: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2593: PetscCall(VecSetErrorIfLocked(y, 3));
2594: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2595: MatCheckPreallocated(mat, 1);
2597: PetscCall(VecLockReadPush(x));
2598: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2599: PetscUseTypeMethod(mat, mult, x, y);
2600: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2601: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2602: PetscCall(VecLockReadPop(x));
2603: PetscFunctionReturn(PETSC_SUCCESS);
2604: }
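/*
  Editorial sketch (not part of the PETSc source): computing y = A x with MatMult() using
  vectors created by MatCreateVecs() so their layouts match the matrix. Names are illustrative.
*/
static PetscErrorCode MatMultExample_Editorial(Mat A)
{
  Vec x, y;

  PetscFunctionBegin;
  PetscCall(MatCreateVecs(A, &x, &y)); /* x conforms to the columns of A, y to the rows */
  PetscCall(VecSet(x, 1.0));
  PetscCall(MatMult(A, x, y));         /* x and y must be different vectors */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscFunctionReturn(PETSC_SUCCESS);
}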
2606: /*@
2607: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2609: Neighbor-wise Collective
2611: Input Parameters:
2612: + mat - the matrix
2613: - x - the vector to be multiplied
2615: Output Parameter:
2616: . y - the result
2618: Level: beginner
2620: Notes:
2621: The vectors `x` and `y` cannot be the same. I.e., one cannot
2622: call `MatMultTranspose`(A,y,y).
2624: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2625: use `MatMultHermitianTranspose()`
2627: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2628: @*/
2629: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2630: {
2631: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2633: PetscFunctionBegin;
2637: VecCheckAssembled(x);
2640: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2641: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2642: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2643: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2644: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2645: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2646: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2647: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2648: MatCheckPreallocated(mat, 1);
2650: if (!mat->ops->multtranspose) {
2651: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2652: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2653: } else op = mat->ops->multtranspose;
2654: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2655: PetscCall(VecLockReadPush(x));
2656: PetscCall((*op)(mat, x, y));
2657: PetscCall(VecLockReadPop(x));
2658: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2659: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2660: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2661: PetscFunctionReturn(PETSC_SUCCESS);
2662: }
2664: /*@
2665: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2667: Neighbor-wise Collective
2669: Input Parameters:
2670: + mat - the matrix
2671: - x - the vector to be multiplied
2673: Output Parameter:
2674: . y - the result
2676: Level: beginner
2678: Notes:
2679: The vectors `x` and `y` cannot be the same. I.e., one cannot
2680: call `MatMultHermitianTranspose`(A,y,y).
2682: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2684: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2686: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2687: @*/
2688: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2689: {
2690: PetscFunctionBegin;
2696: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2697: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2698: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2699: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2700: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2701: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2702: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2703: MatCheckPreallocated(mat, 1);
2705: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2706: #if defined(PETSC_USE_COMPLEX)
2707: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2708: PetscCall(VecLockReadPush(x));
2709: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2710: else PetscUseTypeMethod(mat, mult, x, y);
2711: PetscCall(VecLockReadPop(x));
2712: } else {
2713: Vec w;
2714: PetscCall(VecDuplicate(x, &w));
2715: PetscCall(VecCopy(x, w));
2716: PetscCall(VecConjugate(w));
2717: PetscCall(MatMultTranspose(mat, w, y));
2718: PetscCall(VecDestroy(&w));
2719: PetscCall(VecConjugate(y));
2720: }
2721: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2722: #else
2723: PetscCall(MatMultTranspose(mat, x, y));
2724: #endif
2725: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2726: PetscFunctionReturn(PETSC_SUCCESS);
2727: }
2729: /*@
2730: MatMultAdd - Computes $v3 = v2 + A * v1$.
2732: Neighbor-wise Collective
2734: Input Parameters:
2735: + mat - the matrix
2736: . v1 - the vector to be multiplied by `mat`
2737: - v2 - the vector to be added to the result
2739: Output Parameter:
2740: . v3 - the result
2742: Level: beginner
2744: Note:
2745: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2746: call `MatMultAdd`(A,v1,v2,v1).
2748: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2749: @*/
2750: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2751: {
2752: PetscFunctionBegin;
2759: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2760: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2761: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2762: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2763: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2764: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2765: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2766: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2767: MatCheckPreallocated(mat, 1);
2769: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2770: PetscCall(VecLockReadPush(v1));
2771: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2772: PetscCall(VecLockReadPop(v1));
2773: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2774: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2775: PetscFunctionReturn(PETSC_SUCCESS);
2776: }
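/*
  Editorial sketch (not part of the PETSc source): forming v3 = v2 + A*v1 with MatMultAdd(),
  duplicating v2 to obtain a result vector with a compatible layout. Names are illustrative.
*/
static PetscErrorCode MatMultAddExample_Editorial(Mat A, Vec v1, Vec v2)
{
  Vec v3;

  PetscFunctionBegin;
  PetscCall(VecDuplicate(v2, &v3));
  PetscCall(MatMultAdd(A, v1, v2, v3)); /* v1 and v3 must be different vectors */
  PetscCall(VecDestroy(&v3));
  PetscFunctionReturn(PETSC_SUCCESS);
}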
2778: /*@
2779: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2781: Neighbor-wise Collective
2783: Input Parameters:
2784: + mat - the matrix
2785: . v1 - the vector to be multiplied by the transpose of the matrix
2786: - v2 - the vector to be added to the result
2788: Output Parameter:
2789: . v3 - the result
2791: Level: beginner
2793: Note:
2794: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2795: call `MatMultTransposeAdd`(A,v1,v2,v1).
2797: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2798: @*/
2799: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2800: {
2801: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2803: PetscFunctionBegin;
2810: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2811: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2812: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2813: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2814: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2815: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2816: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2817: MatCheckPreallocated(mat, 1);
2819: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2820: PetscCall(VecLockReadPush(v1));
2821: PetscCall((*op)(mat, v1, v2, v3));
2822: PetscCall(VecLockReadPop(v1));
2823: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2824: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2825: PetscFunctionReturn(PETSC_SUCCESS);
2826: }
2828: /*@
2829: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2831: Neighbor-wise Collective
2833: Input Parameters:
2834: + mat - the matrix
2835: . v1 - the vector to be multiplied by the Hermitian transpose
2836: - v2 - the vector to be added to the result
2838: Output Parameter:
2839: . v3 - the result
2841: Level: beginner
2843: Note:
2844: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2845: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2847: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2848: @*/
2849: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2850: {
2851: PetscFunctionBegin;
2858: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2859: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2860: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2861: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2862: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2863: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2864: MatCheckPreallocated(mat, 1);
2866: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2867: PetscCall(VecLockReadPush(v1));
2868: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2869: else {
2870: Vec w, z;
2871: PetscCall(VecDuplicate(v1, &w));
2872: PetscCall(VecCopy(v1, w));
2873: PetscCall(VecConjugate(w));
2874: PetscCall(VecDuplicate(v3, &z));
2875: PetscCall(MatMultTranspose(mat, w, z));
2876: PetscCall(VecDestroy(&w));
2877: PetscCall(VecConjugate(z));
2878: if (v2 != v3) {
2879: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2880: } else {
2881: PetscCall(VecAXPY(v3, 1.0, z));
2882: }
2883: PetscCall(VecDestroy(&z));
2884: }
2885: PetscCall(VecLockReadPop(v1));
2886: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2887: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2888: PetscFunctionReturn(PETSC_SUCCESS);
2889: }
2891: /*@C
2892: MatGetFactorType - gets the type of factorization that a matrix represents
2894: Not Collective
2896: Input Parameter:
2897: . mat - the matrix
2899: Output Parameter:
2900: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2902: Level: intermediate
2904: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2905: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2906: @*/
2907: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
2908: {
2909: PetscFunctionBegin;
2912: PetscAssertPointer(t, 2);
2913: *t = mat->factortype;
2914: PetscFunctionReturn(PETSC_SUCCESS);
2915: }
2917: /*@C
2918: MatSetFactorType - sets the type of factorization that a matrix represents
2920: Logically Collective
2922: Input Parameters:
2923: + mat - the matrix
2924: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2926: Level: intermediate
2928: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2929: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2930: @*/
2931: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
2932: {
2933: PetscFunctionBegin;
2936: mat->factortype = t;
2937: PetscFunctionReturn(PETSC_SUCCESS);
2938: }
2940: /*@C
2941: MatGetInfo - Returns information about matrix storage (number of
2942: nonzeros, memory, etc.).
2944: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
2946: Input Parameters:
2947: + mat - the matrix
2948: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
2950: Output Parameter:
2951: . info - matrix information context
2953: Options Database Key:
2954: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
2956: Notes:
2957: The `MatInfo` context contains a variety of matrix data, including
2958: number of nonzeros allocated and used, number of mallocs during
2959: matrix assembly, etc. Additional information for factored matrices
2960: is provided (such as the fill ratio, number of mallocs during
2961: factorization, etc.).
2963: Example:
2964: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
2965: data within the MatInfo context. For example,
2966: .vb
2967: MatInfo info;
2968: Mat A;
2969: double mal, nz_a, nz_u;
2971: MatGetInfo(A, MAT_LOCAL, &info);
2972: mal = info.mallocs;
2973: nz_a = info.nz_allocated;
2974: .ve
2976: Fortran users should declare info as a double precision
2977: array of dimension `MAT_INFO_SIZE`, and then extract the parameters
2978: of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h for
2979: a complete list of parameter names.
2980: .vb
2981: double precision info(MAT_INFO_SIZE)
2982: double precision mal, nz_a
2983: Mat A
2984: integer ierr
2986: call MatGetInfo(A, MAT_LOCAL, info, ierr)
2987: mal = info(MAT_INFO_MALLOCS)
2988: nz_a = info(MAT_INFO_NZ_ALLOCATED)
2989: .ve
2991: Level: intermediate
2993: Developer Note:
2994: The Fortran interface is not autogenerated as the
2995: interface definition cannot be generated correctly [due to `MatInfo` argument]
2997: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
2998: @*/
2999: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3000: {
3001: PetscFunctionBegin;
3004: PetscAssertPointer(info, 3);
3005: MatCheckPreallocated(mat, 1);
3006: PetscUseTypeMethod(mat, getinfo, flag, info);
3007: PetscFunctionReturn(PETSC_SUCCESS);
3008: }
3010: /*
3011: This is used by external packages where it is not easy to get the info from the actual
3012: matrix factorization.
3013: */
3014: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3015: {
3016: PetscFunctionBegin;
3017: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3018: PetscFunctionReturn(PETSC_SUCCESS);
3019: }
3021: /*@C
3022: MatLUFactor - Performs in-place LU factorization of a matrix.
3024: Collective
3026: Input Parameters:
3027: + mat - the matrix
3028: . row - row permutation
3029: . col - column permutation
3030: - info - options for factorization, includes
3031: .vb
3032: fill - expected fill as ratio of original fill.
3033: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3034: Run with the option -info to determine an optimal value to use
3035: .ve
3037: Level: developer
3039: Notes:
3040: Most users should employ the `KSP` interface for linear solvers
3041: instead of working directly with matrix algebra routines such as this.
3042: See, e.g., `KSPCreate()`.
3044: This changes the state of the matrix to a factored matrix; it cannot be used,
3045: for example, with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3047: This is really in-place only for dense matrices; when not using `KSP`, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`,
3048: and `MatLUFactorNumeric()`.
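   Example:
   A minimal sketch of an in-place factor-and-solve, assuming `mat` has a type that supports in-place LU (for example, a dense matrix) and that `b` and `x` are compatible, already-created vectors; passing `NULL` for the options selects defaults:
.vb
  IS row, col;
  PetscCall(MatGetOrdering(mat, MATORDERINGNATURAL, &row, &col));
  PetscCall(MatLUFactor(mat, row, col, NULL)); /* mat now holds its own LU factors */
  PetscCall(MatSolve(mat, b, x));              /* use the factors to solve A x = b */
  PetscCall(ISDestroy(&row));
  PetscCall(ISDestroy(&col));
.ve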
3050: Developer Note:
3051: The Fortran interface is not autogenerated as the
3052: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3054: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3055: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3056: @*/
3057: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3058: {
3059: MatFactorInfo tinfo;
3061: PetscFunctionBegin;
3065: if (info) PetscAssertPointer(info, 4);
3067: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3068: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3069: MatCheckPreallocated(mat, 1);
3070: if (!info) {
3071: PetscCall(MatFactorInfoInitialize(&tinfo));
3072: info = &tinfo;
3073: }
3075: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3076: PetscUseTypeMethod(mat, lufactor, row, col, info);
3077: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3078: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3079: PetscFunctionReturn(PETSC_SUCCESS);
3080: }
3082: /*@C
3083: MatILUFactor - Performs in-place ILU factorization of a matrix.
3085: Collective
3087: Input Parameters:
3088: + mat - the matrix
3089: . row - row permutation
3090: . col - column permutation
3091: - info - structure containing
3092: .vb
3093: levels - number of levels of fill.
3094: expected fill - as ratio of original fill.
3095: 1 or 0 - indicating whether to force fill on the diagonal (improves robustness for matrices
3096: missing diagonal entries)
3097: .ve
3099: Level: developer
3101: Notes:
3102: Most users should employ the `KSP` interface for linear solvers
3103: instead of working directly with matrix algebra routines such as this.
3104: See, e.g., `KSPCreate()`.
3106: This is probably really in-place only when the level of fill is zero; otherwise it allocates
3107: new space to store the factored matrix and frees the previous storage. When not using `KSP`, the preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`,
3108: and `MatLUFactorNumeric()`.
3110: Developer Note:
3111: The Fortran interface is not autogenerated as the
3112: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3114: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3115: @*/
3116: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3117: {
3118: PetscFunctionBegin;
3122: PetscAssertPointer(info, 4);
3124: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3125: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3126: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3127: MatCheckPreallocated(mat, 1);
3129: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3130: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3131: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3132: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3133: PetscFunctionReturn(PETSC_SUCCESS);
3134: }
3136: /*@C
3137: MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
3138: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3140: Collective
3142: Input Parameters:
3143: + fact - the factor matrix obtained with `MatGetFactor()`
3144: . mat - the matrix
3145: . row - the row permutation
3146: . col - the column permutation
3147: - info - options for factorization, includes
3148: .vb
3149: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3150: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3151: .ve
3153: Level: developer
3155: Notes:
3156: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3158: Most users should employ the simplified `KSP` interface for linear solvers
3159: instead of working directly with matrix algebra routines such as this.
3160: See, e.g., `KSPCreate()`.
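   Example:
   A minimal sketch of the complete factorization workflow, assuming `A` is an assembled square matrix and `b`, `x` are compatible vectors; the solver package and ordering shown are illustrative choices (a sequential matrix is assumed here; a parallel matrix would need an external package such as `MATSOLVERMUMPS`), and passing `NULL` for the options selects defaults:
.vb
  Mat F;
  IS  row, col;
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
  PetscCall(MatGetOrdering(A, MATORDERINGND, &row, &col));
  PetscCall(MatLUFactorSymbolic(F, A, row, col, NULL));
  PetscCall(MatLUFactorNumeric(F, A, NULL));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&row));
  PetscCall(ISDestroy(&col));
  PetscCall(MatDestroy(&F));
.ve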
3162: Developer Note:
3163: The Fortran interface is not autogenerated as the
3164: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3166: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3167: @*/
3168: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3169: {
3170: MatFactorInfo tinfo;
3172: PetscFunctionBegin;
3177: if (info) PetscAssertPointer(info, 5);
3180: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3181: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3182: MatCheckPreallocated(mat, 2);
3183: if (!info) {
3184: PetscCall(MatFactorInfoInitialize(&tinfo));
3185: info = &tinfo;
3186: }
3188: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3189: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3190: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3191: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3192: PetscFunctionReturn(PETSC_SUCCESS);
3193: }
3195: /*@C
3196: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3197: Call this routine after first calling `MatGetFactor()` and `MatLUFactorSymbolic()`.
3199: Collective
3201: Input Parameters:
3202: + fact - the factor matrix obtained with `MatGetFactor()`
3203: . mat - the matrix
3204: - info - options for factorization
3206: Level: developer
3208: Notes:
3209: See `MatLUFactor()` for in-place factorization. See
3210: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3212: Most users should employ the `KSP` interface for linear solvers
3213: instead of working directly with matrix algebra routines such as this.
3214: See, e.g., `KSPCreate()`.
3216: Developer Note:
3217: The Fortran interface is not autogenerated as the
3218: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3220: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3221: @*/
3222: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3223: {
3224: MatFactorInfo tinfo;
3226: PetscFunctionBegin;
3231: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3232: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3233: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3235: MatCheckPreallocated(mat, 2);
3236: if (!info) {
3237: PetscCall(MatFactorInfoInitialize(&tinfo));
3238: info = &tinfo;
3239: }
3241: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3242: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3243: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3244: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3245: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3246: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3247: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3248: PetscFunctionReturn(PETSC_SUCCESS);
3249: }
3251: /*@C
3252: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3253: symmetric matrix.
3255: Collective
3257: Input Parameters:
3258: + mat - the matrix
3259: . perm - row and column permutations
3260: - info - options for factorization, including the expected fill as ratio of original fill
3262: Level: developer
3264: Notes:
3265: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3266: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3268: Most users should employ the `KSP` interface for linear solvers
3269: instead of working directly with matrix algebra routines such as this.
3270: See, e.g., `KSPCreate()`.
3272: Developer Note:
3273: The Fortran interface is not autogenerated as the
3274: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3276: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`
3277: `MatGetOrdering()`
3278: @*/
3279: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3280: {
3281: MatFactorInfo tinfo;
3283: PetscFunctionBegin;
3286: if (info) PetscAssertPointer(info, 3);
3288: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3289: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3290: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3291: MatCheckPreallocated(mat, 1);
3292: if (!info) {
3293: PetscCall(MatFactorInfoInitialize(&tinfo));
3294: info = &tinfo;
3295: }
3297: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3298: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3299: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3300: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3301: PetscFunctionReturn(PETSC_SUCCESS);
3302: }
3304: /*@C
3305: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3306: of a symmetric matrix.
3308: Collective
3310: Input Parameters:
3311: + fact - the factor matrix obtained with `MatGetFactor()`
3312: . mat - the matrix
3313: . perm - row and column permutations
3314: - info - options for factorization, includes
3315: .vb
3316: fill - expected fill as ratio of original fill.
3317: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3318: Run with the option -info to determine an optimal value to use
3319: .ve
3321: Level: developer
3323: Notes:
3324: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3325: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3327: Most users should employ the `KSP` interface for linear solvers
3328: instead of working directly with matrix algebra routines such as this.
3329: See, e.g., `KSPCreate()`.
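   Example:
   A minimal sketch of the Cholesky workflow, assuming `A` is an assembled symmetric matrix and `b`, `x` are compatible vectors; the solver package and ordering shown are illustrative choices (a sequential matrix is assumed), and passing `NULL` for the options selects defaults:
.vb
  Mat F;
  IS  perm, iperm;
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F));
  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &perm, &iperm));
  PetscCall(MatCholeskyFactorSymbolic(F, A, perm, NULL));
  PetscCall(MatCholeskyFactorNumeric(F, A, NULL));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&perm));
  PetscCall(ISDestroy(&iperm));
  PetscCall(MatDestroy(&F));
.ve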
3331: Developer Note:
3332: The Fortran interface is not autogenerated as the
3333: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3335: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`
3336: `MatGetOrdering()`
3337: @*/
3338: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3339: {
3340: MatFactorInfo tinfo;
3342: PetscFunctionBegin;
3346: if (info) PetscAssertPointer(info, 4);
3349: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3350: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3351: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3352: MatCheckPreallocated(mat, 2);
3353: if (!info) {
3354: PetscCall(MatFactorInfoInitialize(&tinfo));
3355: info = &tinfo;
3356: }
3358: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3359: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3360: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3361: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3362: PetscFunctionReturn(PETSC_SUCCESS);
3363: }
3365: /*@C
3366: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3367: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3368: `MatCholeskyFactorSymbolic()`.
3370: Collective
3372: Input Parameters:
3373: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3374: . mat - the initial matrix that is to be factored
3375: - info - options for factorization
3377: Level: developer
3379: Note:
3380: Most users should employ the `KSP` interface for linear solvers
3381: instead of working directly with matrix algebra routines such as this.
3382: See, e.g., `KSPCreate()`.
3384: Developer Note:
3385: The Fortran interface is not autogenerated as the
3386: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3388: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3389: @*/
3390: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3391: {
3392: MatFactorInfo tinfo;
3394: PetscFunctionBegin;
3399: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3400: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3401: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3402: MatCheckPreallocated(mat, 2);
3403: if (!info) {
3404: PetscCall(MatFactorInfoInitialize(&tinfo));
3405: info = &tinfo;
3406: }
3408: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3409: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3410: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3411: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3412: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3413: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3414: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3415: PetscFunctionReturn(PETSC_SUCCESS);
3416: }
3418: /*@
3419: MatQRFactor - Performs in-place QR factorization of a matrix.
3421: Collective
3423: Input Parameters:
3424: + mat - the matrix
3425: . col - column permutation
3426: - info - options for factorization, includes
3427: .vb
3428: fill - expected fill as ratio of original fill.
3429: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3430: Run with the option -info to determine an optimal value to use
3431: .ve
3433: Level: developer
3435: Notes:
3436: Most users should employ the `KSP` interface for linear solvers
3437: instead of working directly with matrix algebra routines such as this.
3438: See, e.g., `KSPCreate()`.
3440: This changes the state of the matrix to a factored matrix; it cannot be used,
3441: for example, with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3443: Developer Note:
3444: The Fortran interface is not autogenerated as the
3445: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3447: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3448: `MatSetUnfactored()`
3449: @*/
3450: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3451: {
3452: PetscFunctionBegin;
3455: if (info) PetscAssertPointer(info, 3);
3457: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3458: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3459: MatCheckPreallocated(mat, 1);
3460: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3461: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3462: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3463: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3464: PetscFunctionReturn(PETSC_SUCCESS);
3465: }
3467: /*@
3468: MatQRFactorSymbolic - Performs symbolic QR factorization of a matrix.
3469: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3471: Collective
3473: Input Parameters:
3474: + fact - the factor matrix obtained with `MatGetFactor()`
3475: . mat - the matrix
3476: . col - column permutation
3477: - info - options for factorization, includes
3478: .vb
3479: fill - expected fill as ratio of original fill.
3480: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3481: Run with the option -info to determine an optimal value to use
3482: .ve
3484: Level: developer
3486: Note:
3487: Most users should employ the `KSP` interface for linear solvers
3488: instead of working directly with matrix algebra routines such as this.
3489: See, e.g., `KSPCreate()`.
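   Example:
   A minimal sketch, assuming `A` is an assembled matrix whose type and solver package support QR (for example, a dense matrix with the PETSc solver) and `b`, `x` are compatible vectors; passing `NULL` for the column permutation and the options is assumed here to select defaults:
.vb
  Mat F;
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_QR, &F));
  PetscCall(MatQRFactorSymbolic(F, A, NULL, NULL));
  PetscCall(MatQRFactorNumeric(F, A, NULL));
  PetscCall(MatSolve(F, b, x)); /* least-squares solve when A has more rows than columns */
  PetscCall(MatDestroy(&F));
.ve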
3491: Developer Note:
3492: The Fortran interface is not autogenerated as the
3493: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3495: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3496: @*/
3497: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3498: {
3499: MatFactorInfo tinfo;
3501: PetscFunctionBegin;
3505: if (info) PetscAssertPointer(info, 4);
3508: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3509: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3510: MatCheckPreallocated(mat, 2);
3511: if (!info) {
3512: PetscCall(MatFactorInfoInitialize(&tinfo));
3513: info = &tinfo;
3514: }
3516: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3517: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3518: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3519: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3520: PetscFunctionReturn(PETSC_SUCCESS);
3521: }
3523: /*@
3524: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3525: Call this routine after first calling `MatGetFactor()` and `MatQRFactorSymbolic()`.
3527: Collective
3529: Input Parameters:
3530: + fact - the factor matrix obtained with `MatGetFactor()`
3531: . mat - the matrix
3532: - info - options for factorization
3534: Level: developer
3536: Notes:
3537: See `MatQRFactor()` for in-place factorization.
3539: Most users should employ the `KSP` interface for linear solvers
3540: instead of working directly with matrix algebra routines such as this.
3541: See, e.g., `KSPCreate()`.
3543: Developer Note:
3544: The Fortran interface is not autogenerated as the
3545: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3547: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3548: @*/
3549: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3550: {
3551: MatFactorInfo tinfo;
3553: PetscFunctionBegin;
3558: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3559: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3560: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3562: MatCheckPreallocated(mat, 2);
3563: if (!info) {
3564: PetscCall(MatFactorInfoInitialize(&tinfo));
3565: info = &tinfo;
3566: }
3568: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3569: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3570: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3571: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3572: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3573: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3574: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3575: PetscFunctionReturn(PETSC_SUCCESS);
3576: }
3578: /*@
3579: MatSolve - Solves $A x = b$, given a factored matrix.
3581: Neighbor-wise Collective
3583: Input Parameters:
3584: + mat - the factored matrix
3585: - b - the right-hand-side vector
3587: Output Parameter:
3588: . x - the result vector
3590: Level: developer
3592: Notes:
3593: The vectors `b` and `x` cannot be the same. I.e., one cannot
3594: call `MatSolve`(A,x,x).
3596: Most users should employ the `KSP` interface for linear solvers
3597: instead of working directly with matrix algebra routines such as this.
3598: See, e.g., `KSPCreate()`.
3600: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3601: @*/
3602: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3603: {
3604: PetscFunctionBegin;
3609: PetscCheckSameComm(mat, 1, b, 2);
3610: PetscCheckSameComm(mat, 1, x, 3);
3611: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3612: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3613: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3614: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3615: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3616: MatCheckPreallocated(mat, 1);
3618: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3619: if (mat->factorerrortype) {
3620: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3621: PetscCall(VecSetInf(x));
3622: } else PetscUseTypeMethod(mat, solve, b, x);
3623: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3624: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3625: PetscFunctionReturn(PETSC_SUCCESS);
3626: }
3628: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3629: {
3630: Vec b, x;
3631: PetscInt N, i;
3632: PetscErrorCode (*f)(Mat, Vec, Vec);
3633: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3635: PetscFunctionBegin;
3636: if (A->factorerrortype) {
3637: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3638: PetscCall(MatSetInf(X));
3639: PetscFunctionReturn(PETSC_SUCCESS);
3640: }
3641: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3642: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3643: PetscCall(MatBoundToCPU(A, &Abound));
3644: if (!Abound) {
3645: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3646: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3647: }
3648: #if PetscDefined(HAVE_CUDA)
3649: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3650: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3651: #elif PetscDefined(HAVE_HIP)
3652: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3653: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3654: #endif
3655: PetscCall(MatGetSize(B, NULL, &N));
3656: for (i = 0; i < N; i++) {
3657: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3658: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3659: PetscCall((*f)(A, b, x));
3660: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3661: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3662: }
3663: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3664: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3665: PetscFunctionReturn(PETSC_SUCCESS);
3666: }
3668: /*@
3669: MatMatSolve - Solves $A X = B$, given a factored matrix.
3671: Neighbor-wise Collective
3673: Input Parameters:
3674: + A - the factored matrix
3675: - B - the right-hand-side matrix `MATDENSE` (or sparse `MATAIJ`-- when using MUMPS)
3677: Output Parameter:
3678: . X - the result matrix (dense matrix)
3680: Level: developer
3682: Note:
3683: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3684: otherwise, `B` and `X` cannot be the same.
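   Example:
   A minimal sketch solving for several right-hand sides at once, assuming `F` is a factored matrix obtained with `MatGetFactor()` and `B` is a `MATDENSE` matrix whose columns hold the right-hand sides:
.vb
  Mat X;
  PetscCall(MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X));
  PetscCall(MatMatSolve(F, B, X)); /* column i of X solves A x = column i of B */
  PetscCall(MatDestroy(&X));
.ve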
3686: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3687: @*/
3688: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3689: {
3690: PetscFunctionBegin;
3695: PetscCheckSameComm(A, 1, B, 2);
3696: PetscCheckSameComm(A, 1, X, 3);
3697: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3698: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3699: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3700: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3701: MatCheckPreallocated(A, 1);
3703: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3704: if (!A->ops->matsolve) {
3705: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3706: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3707: } else PetscUseTypeMethod(A, matsolve, B, X);
3708: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3709: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3710: PetscFunctionReturn(PETSC_SUCCESS);
3711: }
3713: /*@
3714: MatMatSolveTranspose - Solves $A^T X = B$, given a factored matrix.
3716: Neighbor-wise Collective
3718: Input Parameters:
3719: + A - the factored matrix
3720: - B - the right-hand-side matrix (`MATDENSE` matrix)
3722: Output Parameter:
3723: . X - the result matrix (dense matrix)
3725: Level: developer
3727: Note:
3728: The matrices `B` and `X` cannot be the same. I.e., one cannot
3729: call `MatMatSolveTranspose`(A,X,X).
3731: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3732: @*/
3733: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3734: {
3735: PetscFunctionBegin;
3740: PetscCheckSameComm(A, 1, B, 2);
3741: PetscCheckSameComm(A, 1, X, 3);
3742: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3743: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3744: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3745: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3746: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3747: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3748: MatCheckPreallocated(A, 1);
3750: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3751: if (!A->ops->matsolvetranspose) {
3752: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3753: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3754: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3755: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3756: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3757: PetscFunctionReturn(PETSC_SUCCESS);
3758: }
3760: /*@
3761: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3763: Neighbor-wise Collective
3765: Input Parameters:
3766: + A - the factored matrix
3767: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3769: Output Parameter:
3770: . X - the result matrix (dense matrix)
3772: Level: developer
3774: Note:
3775: MUMPS supports only a centralized sparse compressed-column format on the host processor for the right-hand-side matrix. The user must create `Bt` in sparse compressed-row
3776: format on the host processor and call `MatMatTransposeSolve()` to obtain the effect of MUMPS' `MatMatSolve()`.
3778: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3779: @*/
3780: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3781: {
3782: PetscFunctionBegin;
3787: PetscCheckSameComm(A, 1, Bt, 2);
3788: PetscCheckSameComm(A, 1, X, 3);
3790: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3791: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3792: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3793: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as row number of the rhs matrix");
3794: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3795: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3796: MatCheckPreallocated(A, 1);
3798: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3799: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3800: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3801: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3802: PetscFunctionReturn(PETSC_SUCCESS);
3803: }
3805: /*@
3806: MatForwardSolve - Solves $L x = b$, given a factored matrix $A = LU$, or
3807: $U^T D^{1/2} x = b$, given a factored symmetric matrix $A = U^T D U$.
3809: Neighbor-wise Collective
3811: Input Parameters:
3812: + mat - the factored matrix
3813: - b - the right-hand-side vector
3815: Output Parameter:
3816: . x - the result vector
3818: Level: developer
3820: Notes:
3821: `MatSolve()` should be used for most applications, as it performs
3822: a forward solve followed by a backward solve.
3824: The vectors `b` and `x` cannot be the same, i.e., one cannot
3825: call `MatForwardSolve`(A,x,x).
3827: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3828: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3829: `MatForwardSolve()` solves $U^T D y = b$, and
3830: `MatBackwardSolve()` solves $U x = y$.
3831: Thus they do not provide a symmetric preconditioner.
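   Example:
   A minimal sketch splitting a solve into its two triangular stages, assuming `F` is a factored matrix and `b`, `y`, `x` are compatible vectors; the two calls together are equivalent to a single `MatSolve()`:
.vb
  PetscCall(MatForwardSolve(F, b, y));  /* solve L y = b */
  PetscCall(MatBackwardSolve(F, y, x)); /* solve U x = y */
.ve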
3833: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3834: @*/
3835: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3836: {
3837: PetscFunctionBegin;
3842: PetscCheckSameComm(mat, 1, b, 2);
3843: PetscCheckSameComm(mat, 1, x, 3);
3844: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3845: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3846: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3847: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3848: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3849: MatCheckPreallocated(mat, 1);
3851: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3852: PetscUseTypeMethod(mat, forwardsolve, b, x);
3853: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3854: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3855: PetscFunctionReturn(PETSC_SUCCESS);
3856: }
3858: /*@
3859: MatBackwardSolve - Solves $U x = b$, given a factored matrix $A = LU$, or
3860: $D^{1/2} U x = b$, given a factored symmetric matrix $A = U^T D U$.
3862: Neighbor-wise Collective
3864: Input Parameters:
3865: + mat - the factored matrix
3866: - b - the right-hand-side vector
3868: Output Parameter:
3869: . x - the result vector
3871: Level: developer
3873: Notes:
3874: `MatSolve()` should be used for most applications, as it performs
3875: a forward solve followed by a backward solve.
3877: The vectors `b` and `x` cannot be the same. I.e., one cannot
3878: call `MatBackwardSolve`(A,x,x).
3880: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3881: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3882: `MatForwardSolve()` solves $U^T D y = b$, and
3883: `MatBackwardSolve()` solves $U x = y$.
3884: Thus they do not provide a symmetric preconditioner.
3886: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3887: @*/
3888: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3889: {
3890: PetscFunctionBegin;
3895: PetscCheckSameComm(mat, 1, b, 2);
3896: PetscCheckSameComm(mat, 1, x, 3);
3897: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3898: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3899: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3900: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3901: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3902: MatCheckPreallocated(mat, 1);
3904: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3905: PetscUseTypeMethod(mat, backwardsolve, b, x);
3906: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3907: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3908: PetscFunctionReturn(PETSC_SUCCESS);
3909: }
3911: /*@
3912: MatSolveAdd - Computes $x = y + A^{-1} b$, given a factored matrix.
3914: Neighbor-wise Collective
3916: Input Parameters:
3917: + mat - the factored matrix
3918: . b - the right-hand-side vector
3919: - y - the vector to be added to
3921: Output Parameter:
3922: . x - the result vector
3924: Level: developer
3926: Note:
3927: The vectors `b` and `x` cannot be the same. I.e., one cannot
3928: call `MatSolveAdd`(A,x,y,x).
3930: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3931: @*/
3932: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
3933: {
3934: PetscScalar one = 1.0;
3935: Vec tmp;
3937: PetscFunctionBegin;
3943: PetscCheckSameComm(mat, 1, b, 2);
3944: PetscCheckSameComm(mat, 1, y, 3);
3945: PetscCheckSameComm(mat, 1, x, 4);
3946: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3947: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3948: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3949: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
3950: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3951: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
3952: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3953: MatCheckPreallocated(mat, 1);
3955: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
3956: if (mat->factorerrortype) {
3957: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3958: PetscCall(VecSetInf(x));
3959: } else if (mat->ops->solveadd) {
3960: PetscUseTypeMethod(mat, solveadd, b, y, x);
3961: } else {
3962: /* do the solve then the add manually */
3963: if (x != y) {
3964: PetscCall(MatSolve(mat, b, x));
3965: PetscCall(VecAXPY(x, one, y));
3966: } else {
3967: PetscCall(VecDuplicate(x, &tmp));
3968: PetscCall(VecCopy(x, tmp));
3969: PetscCall(MatSolve(mat, b, x));
3970: PetscCall(VecAXPY(x, one, tmp));
3971: PetscCall(VecDestroy(&tmp));
3972: }
3973: }
3974: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
3975: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3976: PetscFunctionReturn(PETSC_SUCCESS);
3977: }
3979: /*@
3980: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
3982: Neighbor-wise Collective
3984: Input Parameters:
3985: + mat - the factored matrix
3986: - b - the right-hand-side vector
3988: Output Parameter:
3989: . x - the result vector
3991: Level: developer
3993: Notes:
3994: The vectors `b` and `x` cannot be the same. I.e., one cannot
3995: call `MatSolveTranspose`(A,x,x).
3997: Most users should employ the `KSP` interface for linear solvers
3998: instead of working directly with matrix algebra routines such as this.
3999: See, e.g., `KSPCreate()`.
4001: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4002: @*/
4003: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4004: {
4005: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4007: PetscFunctionBegin;
4012: PetscCheckSameComm(mat, 1, b, 2);
4013: PetscCheckSameComm(mat, 1, x, 3);
4014: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4015: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4016: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4017: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4018: MatCheckPreallocated(mat, 1);
4019: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4020: if (mat->factorerrortype) {
4021: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4022: PetscCall(VecSetInf(x));
4023: } else {
4024: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4025: PetscCall((*f)(mat, b, x));
4026: }
4027: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4028: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4029: PetscFunctionReturn(PETSC_SUCCESS);
4030: }
4032: /*@
4033: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4034: factored matrix.
4036: Neighbor-wise Collective
4038: Input Parameters:
4039: + mat - the factored matrix
4040: . b - the right-hand-side vector
4041: - y - the vector to be added to
4043: Output Parameter:
4044: . x - the result vector
4046: Level: developer
4048: Note:
4049: The vectors `b` and `x` cannot be the same. I.e., one cannot
4050: call `MatSolveTransposeAdd`(A,x,y,x).
4052: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4053: @*/
4054: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4055: {
4056: PetscScalar one = 1.0;
4057: Vec tmp;
4058: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4060: PetscFunctionBegin;
4066: PetscCheckSameComm(mat, 1, b, 2);
4067: PetscCheckSameComm(mat, 1, y, 3);
4068: PetscCheckSameComm(mat, 1, x, 4);
4069: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4070: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4071: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4072: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4073: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4074: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4075: MatCheckPreallocated(mat, 1);
4077: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4078: if (mat->factorerrortype) {
4079: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4080: PetscCall(VecSetInf(x));
4081: } else if (f) {
4082: PetscCall((*f)(mat, b, y, x));
4083: } else {
4084: /* do the solve then the add manually */
4085: if (x != y) {
4086: PetscCall(MatSolveTranspose(mat, b, x));
4087: PetscCall(VecAXPY(x, one, y));
4088: } else {
4089: PetscCall(VecDuplicate(x, &tmp));
4090: PetscCall(VecCopy(x, tmp));
4091: PetscCall(MatSolveTranspose(mat, b, x));
4092: PetscCall(VecAXPY(x, one, tmp));
4093: PetscCall(VecDestroy(&tmp));
4094: }
4095: }
4096: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4097: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4098: PetscFunctionReturn(PETSC_SUCCESS);
4099: }
4101: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4102: /*@
4103: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4105: Neighbor-wise Collective
4107: Input Parameters:
4108: + mat - the matrix
4109: . b - the right hand side
4110: . omega - the relaxation factor
4111: . flag - flag indicating the type of SOR (see below)
4112: . shift - diagonal shift
4113: . its - the number of iterations
4114: - lits - the number of local iterations
4116: Output Parameter:
4117: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4119: SOR Flags:
4120: + `SOR_FORWARD_SWEEP` - forward SOR
4121: . `SOR_BACKWARD_SWEEP` - backward SOR
4122: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4123: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4124: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4125: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4126: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4127: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4128: upper/lower triangular part of matrix to
4129: vector (with omega)
4130: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4132: Level: developer
4134: Notes:
4135: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4136: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4137: on each processor.
4139: Application programmers will not generally use `MatSOR()` directly,
4140: but instead will employ the `KSP`/`PC` interface.
4142: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes, this does a block SOR smoothing; otherwise it does a pointwise smoothing.
4144: Most users should employ the `KSP` interface for linear solvers
4145: instead of working directly with matrix algebra routines such as this.
4146: See, e.g., `KSPCreate()`.
4148: Vectors `x` and `b` CANNOT be the same.
4150: The flags are implemented as bitwise inclusive or operations.
4151: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4152: to specify a zero initial guess for SSOR.
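   Example:
   A minimal sketch of one SSOR sweep with a zero initial guess, assuming `A`, `b`, and `x` are assembled and compatible; the relaxation factor 1.0 and the zero diagonal shift are illustrative values:
.vb
  PetscCall(MatSOR(A, b, 1.0, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), 0.0, 1, 1, x));
.ve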
4154: Developer Note:
4155: We should add block SOR support for `MATAIJ` matrices with block size set to greater than one and no inodes
4157: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4158: @*/
4159: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4160: {
4161: PetscFunctionBegin;
4166: PetscCheckSameComm(mat, 1, b, 2);
4167: PetscCheckSameComm(mat, 1, x, 8);
4168: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4169: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4170: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4171: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4172: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4173: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4174: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4175: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4177: MatCheckPreallocated(mat, 1);
4178: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4179: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4180: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4181: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4182: PetscFunctionReturn(PETSC_SUCCESS);
4183: }
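/*
   Illustrative usage sketch (not part of the PETSc source): one SSOR sweep with a zero initial
   guess, combining the flags with a bitwise OR as described in the notes above. The matrix A
   and the conforming vectors b and x are assumed to exist; omega = 1.0 reduces SOR to Gauss-Seidel.

     PetscCall(MatSOR(A, b, 1.0, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), 0.0, 1, 1, x));
*/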
4185: /*
4186: Default matrix copy routine.
4187: */
4188: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4189: {
4190: PetscInt i, rstart = 0, rend = 0, nz;
4191: const PetscInt *cwork;
4192: const PetscScalar *vwork;
4194: PetscFunctionBegin;
4195: if (B->assembled) PetscCall(MatZeroEntries(B));
4196: if (str == SAME_NONZERO_PATTERN) {
4197: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4198: for (i = rstart; i < rend; i++) {
4199: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4200: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4201: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4202: }
4203: } else {
4204: PetscCall(MatAYPX(B, 0.0, A, str));
4205: }
4206: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4207: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4208: PetscFunctionReturn(PETSC_SUCCESS);
4209: }
4211: /*@
4212: MatCopy - Copies a matrix to another matrix.
4214: Collective
4216: Input Parameters:
4217: + A - the matrix
4218: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4220: Output Parameter:
4221: . B - where the copy is put
4223: Level: intermediate
4225: Notes:
4226: If you use `SAME_NONZERO_PATTERN` then the two matrices must have the same nonzero pattern or the routine will crash.
4228: `MatCopy()` copies the matrix entries of a matrix to another existing
4229: matrix (after first zeroing the second matrix). A related routine is
4230: `MatConvert()`, which first creates a new matrix and then copies the data.
4232: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4233: @*/
4234: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4235: {
4236: PetscInt i;
4238: PetscFunctionBegin;
4243: PetscCheckSameComm(A, 1, B, 2);
4244: MatCheckPreallocated(B, 2);
4245: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4246: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4247: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4248: A->cmap->N, B->cmap->N);
4249: MatCheckPreallocated(A, 1);
4250: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4252: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4253: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4254: else PetscCall(MatCopy_Basic(A, B, str));
4256: B->stencil.dim = A->stencil.dim;
4257: B->stencil.noc = A->stencil.noc;
4258: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4259: B->stencil.dims[i] = A->stencil.dims[i];
4260: B->stencil.starts[i] = A->stencil.starts[i];
4261: }
4263: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4264: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4265: PetscFunctionReturn(PETSC_SUCCESS);
4266: }
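/*
   Illustrative usage sketch (not part of the PETSc source): copying an assembled matrix A into an
   already created and preallocated matrix B with the same global dimensions. SAME_NONZERO_PATTERN
   may only be used when A and B truly share a nonzero pattern; DIFFERENT_NONZERO_PATTERN is the
   safe default.

     PetscCall(MatCopy(A, B, DIFFERENT_NONZERO_PATTERN));
*/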
4268: /*@C
4269: MatConvert - Converts a matrix to another matrix, either of the same
4270: or different type.
4272: Collective
4274: Input Parameters:
4275: + mat - the matrix
4276: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4277: same type as the original matrix.
4278: - reuse - denotes if the destination matrix is to be created or reused.
4279: Use `MAT_INPLACE_MATRIX` for in-place conversion (that is, when you want the input `mat` itself changed to contain the matrix in the new format); otherwise use
4280: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (the latter can only be used after a first call with `MAT_INITIAL_MATRIX` and causes the matrix space in `M` to be reused).
4282: Output Parameter:
4283: . M - pointer to place new matrix
4285: Level: intermediate
4287: Notes:
4288: `MatConvert()` first creates a new matrix and then copies the data from
4289: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4290: entries of one matrix to another already existing matrix context.
4292: Cannot be used to convert a sequential matrix to a parallel one or vice versa, since
4293: the MPI communicator of the generated matrix is always the same as the communicator
4294: of the input matrix.
4296: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4297: @*/
4298: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4299: {
4300: PetscBool sametype, issame, flg;
4301: PetscBool3 issymmetric, ishermitian;
4302: char convname[256], mtype[256];
4303: Mat B;
4305: PetscFunctionBegin;
4308: PetscAssertPointer(M, 4);
4309: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4310: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4311: MatCheckPreallocated(mat, 1);
4313: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4314: if (flg) newtype = mtype;
4316: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4317: PetscCall(PetscStrcmp(newtype, "same", &issame));
4318: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4319: if (reuse == MAT_REUSE_MATRIX) {
4321: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4322: }
4324: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4325: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4326: PetscFunctionReturn(PETSC_SUCCESS);
4327: }
4329: /* Cache Mat options because some converters use MatHeaderReplace */
4330: issymmetric = mat->symmetric;
4331: ishermitian = mat->hermitian;
4333: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4334: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4335: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4336: } else {
4337: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4338: const char *prefix[3] = {"seq", "mpi", ""};
4339: PetscInt i;
4340: /*
4341: Order of precedence:
4342: 0) See if newtype is a superclass of the current matrix.
4343: 1) See if a specialized converter is known to the current matrix.
4344: 2) See if a specialized converter is known to the desired matrix class.
4345: 3) See if a good general converter is registered for the desired class
4346: (as of 6/27/03 only MATMPIADJ falls into this category).
4347: 4) See if a good general converter is known for the current matrix.
4348: 5) Use a really basic converter.
4349: */
4351: /* 0) See if newtype is a superclass of the current matrix.
4352: i.e. mat is mpiaij and newtype is aij */
4353: for (i = 0; i < 2; i++) {
4354: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4355: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4356: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4357: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4358: if (flg) {
4359: if (reuse == MAT_INPLACE_MATRIX) {
4360: PetscCall(PetscInfo(mat, "Early return\n"));
4361: PetscFunctionReturn(PETSC_SUCCESS);
4362: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4363: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4364: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4365: PetscFunctionReturn(PETSC_SUCCESS);
4366: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4367: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4368: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4369: PetscFunctionReturn(PETSC_SUCCESS);
4370: }
4371: }
4372: }
4373: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4374: for (i = 0; i < 3; i++) {
4375: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4376: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4377: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4378: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4379: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4380: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4381: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4382: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4383: if (conv) goto foundconv;
4384: }
4386: /* 2) See if a specialized converter is known to the desired matrix class. */
4387: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4388: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4389: PetscCall(MatSetType(B, newtype));
4390: for (i = 0; i < 3; i++) {
4391: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4392: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4393: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4394: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4395: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4396: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4397: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4398: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4399: if (conv) {
4400: PetscCall(MatDestroy(&B));
4401: goto foundconv;
4402: }
4403: }
4405: /* 3) See if a good general converter is registered for the desired class */
4406: conv = B->ops->convertfrom;
4407: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4408: PetscCall(MatDestroy(&B));
4409: if (conv) goto foundconv;
4411: /* 4) See if a good general converter is known for the current matrix */
4412: if (mat->ops->convert) conv = mat->ops->convert;
4413: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4414: if (conv) goto foundconv;
4416: /* 5) Use a really basic converter. */
4417: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4418: conv = MatConvert_Basic;
4420: foundconv:
4421: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4422: PetscCall((*conv)(mat, newtype, reuse, M));
4423: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4424: /* the block sizes must be same if the mappings are copied over */
4425: (*M)->rmap->bs = mat->rmap->bs;
4426: (*M)->cmap->bs = mat->cmap->bs;
4427: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4428: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4429: (*M)->rmap->mapping = mat->rmap->mapping;
4430: (*M)->cmap->mapping = mat->cmap->mapping;
4431: }
4432: (*M)->stencil.dim = mat->stencil.dim;
4433: (*M)->stencil.noc = mat->stencil.noc;
4434: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4435: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4436: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4437: }
4438: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4439: }
4440: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4442: /* Copy Mat options */
4443: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4444: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4445: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4446: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4447: PetscFunctionReturn(PETSC_SUCCESS);
4448: }
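/*
   Illustrative usage sketch (not part of the PETSc source): converting a matrix to dense storage,
   first into a new matrix and then refreshing that matrix on a later call. A is assumed assembled.

     Mat Adense;
     PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense));
     ... the entries of A change; same communicator and sizes ...
     PetscCall(MatConvert(A, MATDENSE, MAT_REUSE_MATRIX, &Adense));
     // or convert A in place, replacing its storage format
     PetscCall(MatConvert(A, MATDENSE, MAT_INPLACE_MATRIX, &A));
*/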
4450: /*@C
4451: MatFactorGetSolverType - Returns the name of the package providing the factorization routines
4453: Not Collective
4455: Input Parameter:
4456: . mat - the matrix, must be a factored matrix
4458: Output Parameter:
4459: . type - the string name of the package (do not free this string)
4461: Level: intermediate
4463: Fortran Note:
4464: Pass in an empty string and the package name will be copied into it. Make sure the string is long enough.
4466: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4467: @*/
4468: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4469: {
4470: PetscErrorCode (*conv)(Mat, MatSolverType *);
4472: PetscFunctionBegin;
4475: PetscAssertPointer(type, 2);
4476: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4477: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4478: if (conv) PetscCall((*conv)(mat, type));
4479: else *type = MATSOLVERPETSC;
4480: PetscFunctionReturn(PETSC_SUCCESS);
4481: }
4483: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4484: struct _MatSolverTypeForSpecifcType {
4485: MatType mtype;
4486: /* no entry for MAT_FACTOR_NONE */
4487: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4488: MatSolverTypeForSpecifcType next;
4489: };
4491: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4492: struct _MatSolverTypeHolder {
4493: char *name;
4494: MatSolverTypeForSpecifcType handlers;
4495: MatSolverTypeHolder next;
4496: };
4498: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4500: /*@C
4501: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4503: Input Parameters:
4504: + package - name of the package, for example petsc or superlu
4505: . mtype - the matrix type that works with this package
4506: . ftype - the type of factorization supported by the package
4507: - createfactor - routine that will create the factored matrix ready to be used
4509: Level: developer
4511: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`
4512: @*/
4513: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4514: {
4515: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4516: PetscBool flg;
4517: MatSolverTypeForSpecifcType inext, iprev = NULL;
4519: PetscFunctionBegin;
4520: PetscCall(MatInitializePackage());
4521: if (!next) {
4522: PetscCall(PetscNew(&MatSolverTypeHolders));
4523: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4524: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4525: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4526: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4527: PetscFunctionReturn(PETSC_SUCCESS);
4528: }
4529: while (next) {
4530: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4531: if (flg) {
4532: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4533: inext = next->handlers;
4534: while (inext) {
4535: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4536: if (flg) {
4537: inext->createfactor[(int)ftype - 1] = createfactor;
4538: PetscFunctionReturn(PETSC_SUCCESS);
4539: }
4540: iprev = inext;
4541: inext = inext->next;
4542: }
4543: PetscCall(PetscNew(&iprev->next));
4544: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4545: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4546: PetscFunctionReturn(PETSC_SUCCESS);
4547: }
4548: prev = next;
4549: next = next->next;
4550: }
4551: PetscCall(PetscNew(&prev->next));
4552: PetscCall(PetscStrallocpy(package, &prev->next->name));
4553: PetscCall(PetscNew(&prev->next->handlers));
4554: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4555: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4556: PetscFunctionReturn(PETSC_SUCCESS);
4557: }
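/*
   Illustrative sketch (not part of the PETSc source): how an external solver package might register
   an LU factorization for sequential AIJ matrices. MatGetFactor_SeqAIJ_MyPkg is a hypothetical
   creation routine with the signature PetscErrorCode (Mat, MatFactorType, Mat *).

     PetscCall(MatSolverTypeRegister("mypkg", MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_SeqAIJ_MyPkg));
*/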
4559: /*@C
4560: MatSolverTypeGet - Gets the function that creates the factor matrix if it exists
4562: Input Parameters:
4563: + type - name of the package, for example petsc or superlu
4564: . ftype - the type of factorization supported by the type
4565: - mtype - the matrix type that works with this type
4567: Output Parameters:
4568: + foundtype - `PETSC_TRUE` if the type was registered
4569: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4570: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4572: Level: developer
4574: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`
4575: @*/
4576: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat, MatFactorType, Mat *))
4577: {
4578: MatSolverTypeHolder next = MatSolverTypeHolders;
4579: PetscBool flg;
4580: MatSolverTypeForSpecifcType inext;
4582: PetscFunctionBegin;
4583: if (foundtype) *foundtype = PETSC_FALSE;
4584: if (foundmtype) *foundmtype = PETSC_FALSE;
4585: if (createfactor) *createfactor = NULL;
4587: if (type) {
4588: while (next) {
4589: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4590: if (flg) {
4591: if (foundtype) *foundtype = PETSC_TRUE;
4592: inext = next->handlers;
4593: while (inext) {
4594: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4595: if (flg) {
4596: if (foundmtype) *foundmtype = PETSC_TRUE;
4597: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4598: PetscFunctionReturn(PETSC_SUCCESS);
4599: }
4600: inext = inext->next;
4601: }
4602: }
4603: next = next->next;
4604: }
4605: } else {
4606: while (next) {
4607: inext = next->handlers;
4608: while (inext) {
4609: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4610: if (flg && inext->createfactor[(int)ftype - 1]) {
4611: if (foundtype) *foundtype = PETSC_TRUE;
4612: if (foundmtype) *foundmtype = PETSC_TRUE;
4613: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4614: PetscFunctionReturn(PETSC_SUCCESS);
4615: }
4616: inext = inext->next;
4617: }
4618: next = next->next;
4619: }
4620: /* try with base classes inext->mtype */
4621: next = MatSolverTypeHolders;
4622: while (next) {
4623: inext = next->handlers;
4624: while (inext) {
4625: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4626: if (flg && inext->createfactor[(int)ftype - 1]) {
4627: if (foundtype) *foundtype = PETSC_TRUE;
4628: if (foundmtype) *foundmtype = PETSC_TRUE;
4629: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4630: PetscFunctionReturn(PETSC_SUCCESS);
4631: }
4632: inext = inext->next;
4633: }
4634: next = next->next;
4635: }
4636: }
4637: PetscFunctionReturn(PETSC_SUCCESS);
4638: }
4640: PetscErrorCode MatSolverTypeDestroy(void)
4641: {
4642: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4643: MatSolverTypeForSpecifcType inext, iprev;
4645: PetscFunctionBegin;
4646: while (next) {
4647: PetscCall(PetscFree(next->name));
4648: inext = next->handlers;
4649: while (inext) {
4650: PetscCall(PetscFree(inext->mtype));
4651: iprev = inext;
4652: inext = inext->next;
4653: PetscCall(PetscFree(iprev));
4654: }
4655: prev = next;
4656: next = next->next;
4657: PetscCall(PetscFree(prev));
4658: }
4659: MatSolverTypeHolders = NULL;
4660: PetscFunctionReturn(PETSC_SUCCESS);
4661: }
4663: /*@C
4664: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4666: Logically Collective
4668: Input Parameter:
4669: . mat - the matrix
4671: Output Parameter:
4672: . flg - `PETSC_TRUE` if it uses the ordering
4674: Level: developer
4676: Note:
4677: Most internal PETSc factorizations use the ordering passed to the factorization routine, but external
4678: packages do not, so we want to skip generating the ordering when it is not needed or used.
4680: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4681: @*/
4682: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4683: {
4684: PetscFunctionBegin;
4685: *flg = mat->canuseordering;
4686: PetscFunctionReturn(PETSC_SUCCESS);
4687: }
4689: /*@C
4690: MatFactorGetPreferredOrdering - Returns the preferred ordering for a particular matrix factor object
4692: Logically Collective
4694: Input Parameters:
4695: + mat - the matrix obtained with `MatGetFactor()`
4696: - ftype - the factorization type to be used
4698: Output Parameter:
4699: . otype - the preferred ordering type
4701: Level: developer
4703: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4704: @*/
4705: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4706: {
4707: PetscFunctionBegin;
4708: *otype = mat->preferredordering[ftype];
4709: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4710: PetscFunctionReturn(PETSC_SUCCESS);
4711: }
4713: /*@C
4714: MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic()
4716: Collective
4718: Input Parameters:
4719: + mat - the matrix
4720: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4721: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4723: Output Parameter:
4724: . f - the factor matrix used with MatXXFactorSymbolic() calls. Can be `NULL` in some cases, see notes below.
4726: Options Database Key:
4727: . -mat_factor_bind_factorization <host, device> - Where to do the matrix factorization. The default is device, which may consume more device memory;
4728: choose host to save device memory. Currently only supported with `MATSEQAIJCUSPARSE` matrices.
4730: Level: intermediate
4732: Notes:
4733: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4734: types registered with `MatSolverTypeRegister()` cannot be fully tested except at runtime.
4736: Users usually access the factorization solvers via `KSP`
4738: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4739: such as pastix, superlu, mumps etc.
4741: PETSc must have been ./configure'd to use the external solver, using the option --download-package
4743: Some of the packages have options for controlling the factorization; these are in the form -prefix_mat_packagename_packageoption,
4744: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly one can
4745: call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4747: Developer Note:
4748: This should actually be called `MatCreateFactor()` since it creates a new factor object
4750: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`,
4751: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4752: @*/
4753: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4754: {
4755: PetscBool foundtype, foundmtype;
4756: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4758: PetscFunctionBegin;
4762: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4763: MatCheckPreallocated(mat, 1);
4765: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4766: if (!foundtype) {
4767: if (type) {
4768: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4769: ((PetscObject)mat)->type_name, type);
4770: } else {
4771: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4772: }
4773: }
4774: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4775: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4777: PetscCall((*conv)(mat, ftype, f));
4778: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4779: PetscFunctionReturn(PETSC_SUCCESS);
4780: }
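/*
   Illustrative usage sketch (not part of the PETSc source): the typical factor-and-solve sequence
   when bypassing KSP, using PETSc's own LU on an AIJ matrix. A, b, and x are assumed to exist;
   the MatFactorInfo parameters are left at their defaults.

     Mat           F;
     IS            rperm, cperm;
     MatFactorInfo info;
     PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
     PetscCall(MatGetOrdering(A, MATORDERINGND, &rperm, &cperm));
     PetscCall(MatFactorInfoInitialize(&info));
     PetscCall(MatLUFactorSymbolic(F, A, rperm, cperm, &info));
     PetscCall(MatLUFactorNumeric(F, A, &info));
     PetscCall(MatSolve(F, b, x));
     PetscCall(ISDestroy(&rperm));
     PetscCall(ISDestroy(&cperm));
     PetscCall(MatDestroy(&F));
*/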
4782: /*@C
4783: MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver type and factor type
4785: Not Collective
4787: Input Parameters:
4788: + mat - the matrix
4789: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4790: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4792: Output Parameter:
4793: . flg - `PETSC_TRUE` if the factorization is available
4795: Level: intermediate
4797: Notes:
4798: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4799: such as pastix, superlu, mumps etc.
4801: PETSc must have been ./configure'd to use the external solver, using the option --download-package
4803: Developer Note:
4804: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4806: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4807: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4808: @*/
4809: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4810: {
4811: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4813: PetscFunctionBegin;
4816: PetscAssertPointer(flg, 4);
4818: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4819: MatCheckPreallocated(mat, 1);
4821: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4822: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4823: PetscFunctionReturn(PETSC_SUCCESS);
4824: }
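/*
   Illustrative usage sketch (not part of the PETSc source): checking for an external package before
   requesting the factor, falling back to PETSc's built-in LU when it is not available.

     PetscBool flg;
     Mat       F;
     PetscCall(MatGetFactorAvailable(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &flg));
     PetscCall(MatGetFactor(A, flg ? MATSOLVERMUMPS : MATSOLVERPETSC, MAT_FACTOR_LU, &F));
*/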
4826: /*@
4827: MatDuplicate - Duplicates a matrix including the non-zero structure.
4829: Collective
4831: Input Parameters:
4832: + mat - the matrix
4833: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4834: See the manual page for `MatDuplicateOption` for an explanation of these options.
4836: Output Parameter:
4837: . M - pointer to place new matrix
4839: Level: intermediate
4841: Notes:
4842: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
4844: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
4846: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4848: When the original `mat` is the product of a matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4849: is duplicated; the internal data structures created for the reuse of previous matrix operations are not.
4850: Users should not use `MatDuplicate()` to create a new matrix `M` if `M` is intended to be reused as the product of a matrix operation.
4852: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4853: @*/
4854: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4855: {
4856: Mat B;
4857: VecType vtype;
4858: PetscInt i;
4859: PetscObject dm, container_h, container_d;
4860: void (*viewf)(void);
4862: PetscFunctionBegin;
4865: PetscAssertPointer(M, 3);
4866: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4867: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4868: MatCheckPreallocated(mat, 1);
4870: *M = NULL;
4871: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4872: PetscUseTypeMethod(mat, duplicate, op, M);
4873: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4874: B = *M;
4876: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4877: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4878: PetscCall(MatGetVecType(mat, &vtype));
4879: PetscCall(MatSetVecType(B, vtype));
4881: B->stencil.dim = mat->stencil.dim;
4882: B->stencil.noc = mat->stencil.noc;
4883: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4884: B->stencil.dims[i] = mat->stencil.dims[i];
4885: B->stencil.starts[i] = mat->stencil.starts[i];
4886: }
4888: B->nooffproczerorows = mat->nooffproczerorows;
4889: B->nooffprocentries = mat->nooffprocentries;
4891: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
4892: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
4893: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
4894: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
4895: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
4896: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
4897: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4898: PetscFunctionReturn(PETSC_SUCCESS);
4899: }
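/*
   Illustrative usage sketch (not part of the PETSc source): the two most common duplication modes
   for an assembled matrix A.

     Mat Bvals, Bzero;
     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &Bvals));        // full copy of structure and values
     PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &Bzero)); // same structure, entries set to zero
*/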
4901: /*@
4902: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
4904: Logically Collective
4906: Input Parameter:
4907: . mat - the matrix
4909: Output Parameter:
4910: . v - the diagonal of the matrix
4912: Level: intermediate
4914: Note:
4915: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
4916: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
4917: is larger than `ndiag`, the values of the remaining entries are unspecified.
4919: Currently only correct in parallel for square matrices.
4921: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
4922: @*/
4923: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
4924: {
4925: PetscFunctionBegin;
4929: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4930: MatCheckPreallocated(mat, 1);
4931: if (PetscDefined(USE_DEBUG)) {
4932: PetscInt nv, row, col, ndiag;
4934: PetscCall(VecGetLocalSize(v, &nv));
4935: PetscCall(MatGetLocalSize(mat, &row, &col));
4936: ndiag = PetscMin(row, col);
4937: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
4938: }
4940: PetscUseTypeMethod(mat, getdiagonal, v);
4941: PetscCall(PetscObjectStateIncrease((PetscObject)v));
4942: PetscFunctionReturn(PETSC_SUCCESS);
4943: }
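/*
   Illustrative usage sketch (not part of the PETSc source): extracting the diagonal into a vector
   with the row layout of A, e.g. as the starting point for a Jacobi-style scaling.

     Vec diag;
     PetscCall(MatCreateVecs(A, NULL, &diag)); // vector conforming to the rows of A
     PetscCall(MatGetDiagonal(A, diag));
     PetscCall(VecDestroy(&diag));
*/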
4945: /*@C
4946: MatGetRowMin - Gets the minimum value (of the real part) of each
4947: row of the matrix
4949: Logically Collective
4951: Input Parameter:
4952: . mat - the matrix
4954: Output Parameters:
4955: + v - the vector for storing the minimums
4956: - idx - the indices of the column found for each row (optional)
4958: Level: intermediate
4960: Note:
4961: The result of this call is the same as if one converted the matrix to dense format
4962: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
4964: This code is only implemented for a couple of matrix formats.
4966: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
4967: `MatGetRowMax()`
4968: @*/
4969: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
4970: {
4971: PetscFunctionBegin;
4975: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4977: if (!mat->cmap->N) {
4978: PetscCall(VecSet(v, PETSC_MAX_REAL));
4979: if (idx) {
4980: PetscInt i, m = mat->rmap->n;
4981: for (i = 0; i < m; i++) idx[i] = -1;
4982: }
4983: } else {
4984: MatCheckPreallocated(mat, 1);
4985: }
4986: PetscUseTypeMethod(mat, getrowmin, v, idx);
4987: PetscCall(PetscObjectStateIncrease((PetscObject)v));
4988: PetscFunctionReturn(PETSC_SUCCESS);
4989: }
4991: /*@C
4992: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
4993: row of the matrix
4995: Logically Collective
4997: Input Parameter:
4998: . mat - the matrix
5000: Output Parameters:
5001: + v - the vector for storing the minimums
5002: - idx - the indices of the column found for each row (or `NULL` if not needed)
5004: Level: intermediate
5006: Notes:
5007: If a row is completely empty or has only 0.0 values, then the `idx` value for that
5008: row is 0 (the first column).
5010: This code is only implemented for a couple of matrix formats.
5012: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5013: @*/
5014: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5015: {
5016: PetscFunctionBegin;
5020: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5021: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5023: if (!mat->cmap->N) {
5024: PetscCall(VecSet(v, 0.0));
5025: if (idx) {
5026: PetscInt i, m = mat->rmap->n;
5027: for (i = 0; i < m; i++) idx[i] = -1;
5028: }
5029: } else {
5030: MatCheckPreallocated(mat, 1);
5031: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5032: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5033: }
5034: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5035: PetscFunctionReturn(PETSC_SUCCESS);
5036: }
5038: /*@C
5039: MatGetRowMax - Gets the maximum value (of the real part) of each
5040: row of the matrix
5042: Logically Collective
5044: Input Parameter:
5045: . mat - the matrix
5047: Output Parameters:
5048: + v - the vector for storing the maximums
5049: - idx - the indices of the column found for each row (optional)
5051: Level: intermediate
5053: Notes:
5054: The result of this call is the same as if one converted the matrix to dense format
5055: and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
5057: This code is only implemented for a couple of matrix formats.
5059: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5060: @*/
5061: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5062: {
5063: PetscFunctionBegin;
5067: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5069: if (!mat->cmap->N) {
5070: PetscCall(VecSet(v, PETSC_MIN_REAL));
5071: if (idx) {
5072: PetscInt i, m = mat->rmap->n;
5073: for (i = 0; i < m; i++) idx[i] = -1;
5074: }
5075: } else {
5076: MatCheckPreallocated(mat, 1);
5077: PetscUseTypeMethod(mat, getrowmax, v, idx);
5078: }
5079: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5080: PetscFunctionReturn(PETSC_SUCCESS);
5081: }
5083: /*@C
5084: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5085: row of the matrix
5087: Logically Collective
5089: Input Parameter:
5090: . mat - the matrix
5092: Output Parameters:
5093: + v - the vector for storing the maximums
5094: - idx - the indices of the column found for each row (or `NULL` if not needed)
5096: Level: intermediate
5098: Notes:
5099: If a row is completely empty or has only 0.0 values, then the `idx` value for that
5100: row is 0 (the first column).
5102: This code is only implemented for a couple of matrix formats.
5104: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5105: @*/
5106: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5107: {
5108: PetscFunctionBegin;
5112: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5114: if (!mat->cmap->N) {
5115: PetscCall(VecSet(v, 0.0));
5116: if (idx) {
5117: PetscInt i, m = mat->rmap->n;
5118: for (i = 0; i < m; i++) idx[i] = -1;
5119: }
5120: } else {
5121: MatCheckPreallocated(mat, 1);
5122: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5123: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5124: }
5125: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5126: PetscFunctionReturn(PETSC_SUCCESS);
5127: }
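/*
   Illustrative usage sketch (not part of the PETSc source): obtaining the largest absolute entry
   of each row, here without the optional column indices, e.g. to build a row-scaling vector.

     Vec rowmax;
     PetscCall(MatCreateVecs(A, NULL, &rowmax));
     PetscCall(MatGetRowMaxAbs(A, rowmax, NULL));
     PetscCall(VecDestroy(&rowmax));
*/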
5129: /*@
5130: MatGetRowSum - Gets the sum of each row of the matrix
5132: Logically or Neighborhood Collective
5134: Input Parameter:
5135: . mat - the matrix
5137: Output Parameter:
5138: . v - the vector for storing the sum of rows
5140: Level: intermediate
5142: Note:
5143: This code is slow since it is not currently specialized for different formats
5145: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`
5146: @*/
5147: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5148: {
5149: Vec ones;
5151: PetscFunctionBegin;
5155: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5156: MatCheckPreallocated(mat, 1);
5157: PetscCall(MatCreateVecs(mat, &ones, NULL));
5158: PetscCall(VecSet(ones, 1.));
5159: PetscCall(MatMult(mat, ones, v));
5160: PetscCall(VecDestroy(&ones));
5161: PetscFunctionReturn(PETSC_SUCCESS);
5162: }
5164: /*@
5165: MatTransposeSetPrecursor - Sets the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B),
5166: when `B` was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5168: Collective
5170: Input Parameter:
5171: . mat - the matrix to provide the transpose
5173: Output Parameter:
5174: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5176: Level: advanced
5178: Note:
5179: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5180: routine allows bypassing that call.
5182: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5183: @*/
5184: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5185: {
5186: PetscContainer rB = NULL;
5187: MatParentState *rb = NULL;
5189: PetscFunctionBegin;
5190: PetscCall(PetscNew(&rb));
5191: rb->id = ((PetscObject)mat)->id;
5192: rb->state = 0;
5193: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5194: PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)B), &rB));
5195: PetscCall(PetscContainerSetPointer(rB, rb));
5196: PetscCall(PetscContainerSetUserDestroy(rB, PetscContainerUserDestroyDefault));
5197: PetscCall(PetscObjectCompose((PetscObject)B, "MatTransposeParent", (PetscObject)rB));
5198: PetscCall(PetscObjectDereference((PetscObject)rB));
5199: PetscFunctionReturn(PETSC_SUCCESS);
5200: }
5202: /*@
5203: MatTranspose - Computes an in-place or out-of-place transpose of a matrix.
5205: Collective
5207: Input Parameters:
5208: + mat - the matrix to transpose
5209: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5211: Output Parameter:
5212: . B - the transpose
5214: Level: intermediate
5216: Notes:
5217: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5219: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX`. If you already have a matrix to contain the
5220: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5222: If the nonzero structure of mat changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5224: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose, but don't need the storage to be changed.
5226: If mat is unchanged from the last call this function returns immediately without recomputing the result
5228: If you only need the symbolic transpose, and not the numerical values, use `MatTransposeSymbolic()`
5230: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5231: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5232: @*/
5233: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5234: {
5235: PetscContainer rB = NULL;
5236: MatParentState *rb = NULL;
5238: PetscFunctionBegin;
5241: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5242: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5243: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5244: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5245: MatCheckPreallocated(mat, 1);
5246: if (reuse == MAT_REUSE_MATRIX) {
5247: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5248: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5249: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5250: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5251: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5252: }
5254: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5255: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5256: PetscUseTypeMethod(mat, transpose, reuse, B);
5257: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5258: }
5259: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5261: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5262: if (reuse != MAT_INPLACE_MATRIX) {
5263: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5264: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5265: rb->state = ((PetscObject)mat)->state;
5266: rb->nonzerostate = mat->nonzerostate;
5267: }
5268: PetscFunctionReturn(PETSC_SUCCESS);
5269: }
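/*
   Illustrative usage sketch (not part of the PETSc source): computing a transpose once and then
   refreshing only the numerical values after A changes, as described in the notes above.

     Mat At;
     PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &At));
     ... numerical values of A change, the nonzero structure does not ...
     PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &At));
     // if At was created some other way but has the correct nonzero structure, declare it first:
     // PetscCall(MatTransposeSetPrecursor(A, At));
*/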
5271: /*@
5272: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5274: Collective
5276: Input Parameter:
5277: . A - the matrix to transpose
5279: Output Parameter:
5280: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5281: numerical portion.
5283: Level: intermediate
5285: Note:
5286: This is not supported for many matrix types, use `MatTranspose()` in those cases
5288: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5289: @*/
5290: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5291: {
5292: PetscFunctionBegin;
5295: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5296: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5297: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5298: PetscUseTypeMethod(A, transposesymbolic, B);
5299: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5301: PetscCall(MatTransposeSetPrecursor(A, *B));
5302: PetscFunctionReturn(PETSC_SUCCESS);
5303: }
5305: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5306: {
5307: PetscContainer rB;
5308: MatParentState *rb;
5310: PetscFunctionBegin;
5313: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5314: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5315: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5316: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5317: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5318: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5319: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5320: PetscFunctionReturn(PETSC_SUCCESS);
5321: }
5323: /*@
5324: MatIsTranspose - Test whether a matrix is another one's transpose,
5325: or its own, in which case it tests symmetry.
5327: Collective
5329: Input Parameters:
5330: + A - the matrix to test
5331: . B - the matrix to test against, this can equal the first parameter
5332: - tol - tolerance, differences between entries smaller than this are counted as zero
5334: Output Parameter:
5335: . flg - the result
5337: Level: intermediate
5339: Notes:
5340: Only available for `MATAIJ` matrices.
5342: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5343: test involves parallel copies of the block off-diagonal parts of the matrix.
5345: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5346: @*/
5347: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5348: {
5349: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5351: PetscFunctionBegin;
5354: PetscAssertPointer(flg, 4);
5355: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5356: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5357: *flg = PETSC_FALSE;
5358: if (f && g) {
5359: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5360: PetscCall((*f)(A, B, tol, flg));
5361: } else {
5362: MatType mattype;
5364: PetscCall(MatGetType(f ? B : A, &mattype));
5365: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5366: }
5367: PetscFunctionReturn(PETSC_SUCCESS);
5368: }
5370: /*@
5371: MatHermitianTranspose - Computes the in-place or out-of-place Hermitian transpose of a matrix, that is, the transpose of its complex conjugate.
5373: Collective
5375: Input Parameters:
5376: + mat - the matrix to transpose and complex conjugate
5377: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5379: Output Parameter:
5380: . B - the Hermitian transpose
5382: Level: intermediate
5384: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5385: @*/
5386: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5387: {
5388: PetscFunctionBegin;
5389: PetscCall(MatTranspose(mat, reuse, B));
5390: #if defined(PETSC_USE_COMPLEX)
5391: PetscCall(MatConjugate(*B));
5392: #endif
5393: PetscFunctionReturn(PETSC_SUCCESS);
5394: }
5396: /*@
5397: MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose,
5399: Collective
5401: Input Parameters:
5402: + A - the matrix to test
5403: . B - the matrix to test against, this can equal the first parameter
5404: - tol - tolerance, differences between entries smaller than this are counted as zero
5406: Output Parameter:
5407: . flg - the result
5409: Level: intermediate
5411: Notes:
5412: Only available for `MATAIJ` matrices.
5414: The sequential algorithm
5415: has a running time of the order of the number of nonzeros; the parallel
5416: test involves parallel copies of the block off-diagonal parts of the matrix.
5418: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5419: @*/
5420: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5421: {
5422: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5424: PetscFunctionBegin;
5427: PetscAssertPointer(flg, 4);
5428: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5429: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5430: if (f && g) {
5431: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5432: PetscCall((*f)(A, B, tol, flg));
5433: }
5434: PetscFunctionReturn(PETSC_SUCCESS);
5435: }
5437: /*@
5438: MatPermute - Creates a new matrix with rows and columns permuted from the
5439: original.
5441: Collective
5443: Input Parameters:
5444: + mat - the matrix to permute
5445: . row - row permutation, each processor supplies only the permutation for its rows
5446: - col - column permutation, each processor supplies only the permutation for its columns
5448: Output Parameter:
5449: . B - the permuted matrix
5451: Level: advanced
5453: Note:
5454: The index sets map from row/col of permuted matrix to row/col of original matrix.
5455: The index sets should be on the same communicator as mat and have the same local sizes.
5457: Developer Note:
5458: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5459: exploit the fact that row and col are permutations, consider implementing the
5460: more general `MatCreateSubMatrix()` instead.
5462: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5463: @*/
5464: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5465: {
5466: PetscFunctionBegin;
5471: PetscAssertPointer(B, 4);
5472: PetscCheckSameComm(mat, 1, row, 2);
5473: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5474: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5475: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5476: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5477: MatCheckPreallocated(mat, 1);
5479: if (mat->ops->permute) {
5480: PetscUseTypeMethod(mat, permute, row, col, B);
5481: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5482: } else {
5483: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5484: }
5485: PetscFunctionReturn(PETSC_SUCCESS);
5486: }
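/*
   Illustrative usage sketch (not part of the PETSc source): permuting a matrix with a fill-reducing
   ordering obtained from MatGetOrdering(); the index sets map rows/columns of the permuted matrix
   back to those of the original, as noted above.

     IS  rperm, cperm;
     Mat B;
     PetscCall(MatGetOrdering(A, MATORDERINGRCM, &rperm, &cperm));
     PetscCall(MatPermute(A, rperm, cperm, &B));
     PetscCall(ISDestroy(&rperm));
     PetscCall(ISDestroy(&cperm));
*/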
5488: /*@
5489: MatEqual - Compares two matrices.
5491: Collective
5493: Input Parameters:
5494: + A - the first matrix
5495: - B - the second matrix
5497: Output Parameter:
5498: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5500: Level: intermediate
5502: .seealso: [](ch_matrices), `Mat`
5503: @*/
5504: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5505: {
5506: PetscFunctionBegin;
5511: PetscAssertPointer(flg, 3);
5512: PetscCheckSameComm(A, 1, B, 2);
5513: MatCheckPreallocated(A, 1);
5514: MatCheckPreallocated(B, 2);
5515: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5516: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5517: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5518: B->cmap->N);
5519: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5520: PetscUseTypeMethod(A, equal, B, flg);
5521: } else {
5522: PetscCall(MatMultEqual(A, B, 10, flg));
5523: }
5524: PetscFunctionReturn(PETSC_SUCCESS);
5525: }
5527: /*@
5528: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5529: matrices that are stored as vectors. Either of the two scaling
5530: matrices can be `NULL`.
5532: Collective
5534: Input Parameters:
5535: + mat - the matrix to be scaled
5536: . l - the left scaling vector (or `NULL`)
5537: - r - the right scaling vector (or `NULL`)
5539: Level: intermediate
5541: Note:
5542: `MatDiagonalScale()` computes $A = LAR$, where
5543: $L$ is a diagonal matrix (stored as a vector) that scales the rows of the matrix and
5544: $R$ is a diagonal matrix (stored as a vector) that scales the columns of the matrix.
5546: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5547: @*/
5548: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5549: {
5550: PetscFunctionBegin;
5553: if (l) {
5555: PetscCheckSameComm(mat, 1, l, 2);
5556: }
5557: if (r) {
5559: PetscCheckSameComm(mat, 1, r, 3);
5560: }
5561: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5562: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5563: MatCheckPreallocated(mat, 1);
5564: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5566: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5567: PetscUseTypeMethod(mat, diagonalscale, l, r);
5568: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5569: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5570: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5571: PetscFunctionReturn(PETSC_SUCCESS);
5572: }
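/*
  Usage sketch for MatDiagonalScale() above (illustrative only): scale the rows of an assembled
  matrix A by 2 and its columns by 1/2. The name A is a placeholder; l and r are created with
  layouts compatible with A via MatCreateVecs().

    Vec l, r;

    PetscCall(MatCreateVecs(A, &r, &l)); // r matches the columns of A, l matches the rows
    PetscCall(VecSet(l, 2.0));
    PetscCall(VecSet(r, 0.5));
    PetscCall(MatDiagonalScale(A, l, r)); // A <- diag(l) A diag(r)
    PetscCall(VecDestroy(&l));
    PetscCall(VecDestroy(&r));
*/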
5574: /*@
5575: MatScale - Scales all elements of a matrix by a given number.
5577: Logically Collective
5579: Input Parameters:
5580: + mat - the matrix to be scaled
5581: - a - the scaling value
5583: Level: intermediate
5585: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5586: @*/
5587: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5588: {
5589: PetscFunctionBegin;
5592: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5593: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5595: MatCheckPreallocated(mat, 1);
5597: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5598: if (a != (PetscScalar)1.0) {
5599: PetscUseTypeMethod(mat, scale, a);
5600: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5601: }
5602: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5603: PetscFunctionReturn(PETSC_SUCCESS);
5604: }
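/*
  Usage sketch for MatScale() above (illustrative only): multiply every entry of an assembled
  matrix A (placeholder name) by a scalar.

    PetscCall(MatScale(A, 0.25)); // A <- 0.25 * A
*/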
5606: /*@
5607: MatNorm - Calculates various norms of a matrix.
5609: Collective
5611: Input Parameters:
5612: + mat - the matrix
5613: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5615: Output Parameter:
5616: . nrm - the resulting norm
5618: Level: intermediate
5620: .seealso: [](ch_matrices), `Mat`
5621: @*/
5622: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5623: {
5624: PetscFunctionBegin;
5627: PetscAssertPointer(nrm, 3);
5629: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5630: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5631: MatCheckPreallocated(mat, 1);
5633: PetscUseTypeMethod(mat, norm, type, nrm);
5634: PetscFunctionReturn(PETSC_SUCCESS);
5635: }
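/*
  Usage sketch for MatNorm() above (illustrative only): compute the three supported norms of an
  assembled matrix A (placeholder name).

    PetscReal n1, nf, ninf;

    PetscCall(MatNorm(A, NORM_1, &n1));          // maximum column sum of |a_ij|
    PetscCall(MatNorm(A, NORM_FROBENIUS, &nf));  // square root of the sum of squares of all entries
    PetscCall(MatNorm(A, NORM_INFINITY, &ninf)); // maximum row sum of |a_ij|
*/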
5637: /*
5638: This variable is used to prevent counting of MatAssemblyBegin() that
5639: are called from within a MatAssemblyEnd().
5640: */
5641: static PetscInt MatAssemblyEnd_InUse = 0;
5642: /*@
5643: MatAssemblyBegin - Begins assembling the matrix. This routine should
5644: be called after completing all calls to `MatSetValues()`.
5646: Collective
5648: Input Parameters:
5649: + mat - the matrix
5650: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5652: Level: beginner
5654: Notes:
5655: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5656: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5658: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5659: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5660: using the matrix.
5662: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5663: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, since that is
5664: a global collective operation requiring all processes that share the matrix.
5666: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine is compressed
5667: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5668: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
5670: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5671: @*/
5672: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5673: {
5674: PetscFunctionBegin;
5677: MatCheckPreallocated(mat, 1);
5678: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix.\nDid you forget to call MatSetUnfactored()?");
5679: if (mat->assembled) {
5680: mat->was_assembled = PETSC_TRUE;
5681: mat->assembled = PETSC_FALSE;
5682: }
5684: if (!MatAssemblyEnd_InUse) {
5685: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5686: PetscTryTypeMethod(mat, assemblybegin, type);
5687: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5688: } else PetscTryTypeMethod(mat, assemblybegin, type);
5689: PetscFunctionReturn(PETSC_SUCCESS);
5690: }
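/*
  Usage sketch for the assembly calls above (illustrative only): a flush assembly when switching
  from ADD_VALUES to INSERT_VALUES, followed by a final assembly before the matrix is used. A is
  a placeholder for an already created and preallocated matrix; i and j are valid global indices.

    PetscCall(MatSetValue(A, i, i, 1.0, ADD_VALUES));
    PetscCall(MatAssemblyBegin(A, MAT_FLUSH_ASSEMBLY)); // required before changing the insert mode
    PetscCall(MatAssemblyEnd(A, MAT_FLUSH_ASSEMBLY));
    PetscCall(MatSetValue(A, i, j, 2.0, INSERT_VALUES));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));   // A is now ready for MatMult() etc.
*/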
5692: /*@
5693: MatAssembled - Indicates if a matrix has been assembled and is ready for
5694: use; for example, in matrix-vector product.
5696: Not Collective
5698: Input Parameter:
5699: . mat - the matrix
5701: Output Parameter:
5702: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5704: Level: advanced
5706: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5707: @*/
5708: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5709: {
5710: PetscFunctionBegin;
5712: PetscAssertPointer(assembled, 2);
5713: *assembled = mat->assembled;
5714: PetscFunctionReturn(PETSC_SUCCESS);
5715: }
5717: /*@
5718: MatAssemblyEnd - Completes assembling the matrix. This routine should
5719: be called after `MatAssemblyBegin()`.
5721: Collective
5723: Input Parameters:
5724: + mat - the matrix
5725: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5727: Options Database Keys:
5728: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5729: . -mat_view ::ascii_info_detail - Prints more detailed info
5730: . -mat_view - Prints matrix in ASCII format
5731: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5732: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5733: . -display <name> - Sets display name (default is host)
5734: . -draw_pause <sec> - Sets number of seconds to pause after display
5735: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5736: . -viewer_socket_machine <machine> - Machine to use for socket
5737: . -viewer_socket_port <port> - Port number to use for socket
5738: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5740: Level: beginner
5742: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5743: @*/
5744: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5745: {
5746: static PetscInt inassm = 0;
5747: PetscBool flg = PETSC_FALSE;
5749: PetscFunctionBegin;
5753: inassm++;
5754: MatAssemblyEnd_InUse++;
5755: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5756: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5757: PetscTryTypeMethod(mat, assemblyend, type);
5758: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5759: } else PetscTryTypeMethod(mat, assemblyend, type);
5761: /* Flush assembly is not a true assembly */
5762: if (type != MAT_FLUSH_ASSEMBLY) {
5763: if (mat->num_ass) {
5764: if (!mat->symmetry_eternal) {
5765: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5766: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5767: }
5768: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5769: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5770: }
5771: mat->num_ass++;
5772: mat->assembled = PETSC_TRUE;
5773: mat->ass_nonzerostate = mat->nonzerostate;
5774: }
5776: mat->insertmode = NOT_SET_VALUES;
5777: MatAssemblyEnd_InUse--;
5778: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5779: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5780: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5782: if (mat->checksymmetryonassembly) {
5783: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5784: if (flg) {
5785: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5786: } else {
5787: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5788: }
5789: }
5790: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5791: }
5792: inassm--;
5793: PetscFunctionReturn(PETSC_SUCCESS);
5794: }
5796: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5797: /*@
5798: MatSetOption - Sets a parameter option for a matrix. Some options
5799: may be specific to certain storage formats. Some options
5800: determine how values will be inserted (or added). Sorted,
5801: row-oriented input will generally assemble the fastest. The default
5802: is row-oriented.
5804: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5806: Input Parameters:
5807: + mat - the matrix
5808: . op - the option, one of those listed below (and possibly others),
5809: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5811: Options Describing Matrix Structure:
5812: + `MAT_SPD` - symmetric positive definite
5813: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5814: . `MAT_HERMITIAN` - transpose is the complex conjugation
5815: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5816: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5817: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5818: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5820: These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that it
5821: does not need to be computed (usually at a high cost).
5823: Options For Use with `MatSetValues()`:
5824: Insert a logically dense subblock, which can be
5825: . `MAT_ROW_ORIENTED` - row-oriented (default)
5827: These options reflect the data you pass in with `MatSetValues()`; it has
5828: nothing to do with how the data is stored internally in the matrix
5829: data structure.
5831: When (re)assembling a matrix, we can restrict the input for
5832: efficiency/debugging purposes. These options include
5833: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5834: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5835: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5836: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5837: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5838: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5839: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5840: performance for very large process counts.
5841: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5842: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5843: functions, instead sending only neighbor messages.
5845: Level: intermediate
5847: Notes:
5848: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
5850: Some options are relevant only for particular matrix types and
5851: are thus ignored by others. Other options are not supported by
5852: certain matrix types and will generate an error message if set.
5854: If using Fortran to compute a matrix, one may need to
5855: use the column-oriented option (or convert to the row-oriented
5856: format).
5858: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
5859: that would generate a new entry in the nonzero structure is instead
5860: ignored. Thus, if memory has not already been allocated for this particular
5861: data, then the insertion is ignored. For dense matrices, in which
5862: the entire array is allocated, no entries are ever ignored.
5863: This option should be set after the first `MatAssemblyEnd()`. If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one fewer global reduction
5865: `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5866: that would generate a new entry in the nonzero structure instead produces
5867: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one fewer global reduction
5869: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5870: that would generate a new entry that has not been preallocated will
5871: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
5872: only.) This is a useful flag when debugging matrix memory preallocation.
5873: If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one fewer global reduction
5875: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
5876: other processors should be dropped, rather than stashed.
5877: This is useful if you know that the "owning" processor is also
5878: always generating the correct matrix entries, so that PETSc need
5879: not transfer duplicate entries generated on another processor.
5881: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
5882: searches during matrix assembly. When this flag is set, the hash table
5883: is created during the first matrix assembly. This hash table is
5884: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
5885: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
5886: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
5887: supported by `MATMPIBAIJ` format only.
5889: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
5890: are kept in the nonzero structure
5892: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
5893: a zero location in the matrix
5895: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
5897: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
5898: zero row routines and thus improves performance for very large process counts.
5900: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
5901: part of the matrix (since they should match the upper triangular part).
5903: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
5904: single call to `MatSetValues()`, preallocation is perfect, row oriented, `INSERT_VALUES` is used. Common
5905: with finite difference schemes with non-periodic boundary conditions.
5907: Developer Note:
5908: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
5909: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
5910: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
5911: not changed.
5913: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
5914: @*/
5915: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
5916: {
5917: PetscFunctionBegin;
5919: if (op > 0) {
5922: }
5924: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
5926: switch (op) {
5927: case MAT_FORCE_DIAGONAL_ENTRIES:
5928: mat->force_diagonals = flg;
5929: PetscFunctionReturn(PETSC_SUCCESS);
5930: case MAT_NO_OFF_PROC_ENTRIES:
5931: mat->nooffprocentries = flg;
5932: PetscFunctionReturn(PETSC_SUCCESS);
5933: case MAT_SUBSET_OFF_PROC_ENTRIES:
5934: mat->assembly_subset = flg;
5935: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
5936: #if !defined(PETSC_HAVE_MPIUNI)
5937: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
5938: #endif
5939: mat->stash.first_assembly_done = PETSC_FALSE;
5940: }
5941: PetscFunctionReturn(PETSC_SUCCESS);
5942: case MAT_NO_OFF_PROC_ZERO_ROWS:
5943: mat->nooffproczerorows = flg;
5944: PetscFunctionReturn(PETSC_SUCCESS);
5945: case MAT_SPD:
5946: if (flg) {
5947: mat->spd = PETSC_BOOL3_TRUE;
5948: mat->symmetric = PETSC_BOOL3_TRUE;
5949: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5950: } else {
5951: mat->spd = PETSC_BOOL3_FALSE;
5952: }
5953: break;
5954: case MAT_SYMMETRIC:
5955: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5956: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5957: #if !defined(PETSC_USE_COMPLEX)
5958: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5959: #endif
5960: break;
5961: case MAT_HERMITIAN:
5962: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5963: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5964: #if !defined(PETSC_USE_COMPLEX)
5965: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5966: #endif
5967: break;
5968: case MAT_STRUCTURALLY_SYMMETRIC:
5969: mat->structurally_symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5970: break;
5971: case MAT_SYMMETRY_ETERNAL:
5972: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
5973: mat->symmetry_eternal = flg;
5974: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
5975: break;
5976: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
5977: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
5978: mat->structural_symmetry_eternal = flg;
5979: break;
5980: case MAT_SPD_ETERNAL:
5981: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
5982: mat->spd_eternal = flg;
5983: if (flg) {
5984: mat->structural_symmetry_eternal = PETSC_TRUE;
5985: mat->symmetry_eternal = PETSC_TRUE;
5986: }
5987: break;
5988: case MAT_STRUCTURE_ONLY:
5989: mat->structure_only = flg;
5990: break;
5991: case MAT_SORTED_FULL:
5992: mat->sortedfull = flg;
5993: break;
5994: default:
5995: break;
5996: }
5997: PetscTryTypeMethod(mat, setoption, op, flg);
5998: PetscFunctionReturn(PETSC_SUCCESS);
5999: }
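/*
  Usage sketch for MatSetOption() above (illustrative only): declare structural knowledge about a
  matrix A (placeholder name) and request an error on any insertion that was not preallocated.
  Note that MAT_SYMMETRIC must be set before MAT_SYMMETRY_ETERNAL.

    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
    PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));
    PetscCall(MatSetOption(A, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
*/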
6001: /*@
6002: MatGetOption - Gets a parameter option that has been set for a matrix.
6004: Logically Collective
6006: Input Parameters:
6007: + mat - the matrix
6008: - op - the option, this only responds to certain options, check the code for which ones
6010: Output Parameter:
6011: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6013: Level: intermediate
6015: Notes:
6016: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6018: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6019: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6021: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6022: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6023: @*/
6024: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6025: {
6026: PetscFunctionBegin;
6030: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6031: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6033: switch (op) {
6034: case MAT_NO_OFF_PROC_ENTRIES:
6035: *flg = mat->nooffprocentries;
6036: break;
6037: case MAT_NO_OFF_PROC_ZERO_ROWS:
6038: *flg = mat->nooffproczerorows;
6039: break;
6040: case MAT_SYMMETRIC:
6041: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6042: break;
6043: case MAT_HERMITIAN:
6044: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6045: break;
6046: case MAT_STRUCTURALLY_SYMMETRIC:
6047: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6048: break;
6049: case MAT_SPD:
6050: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6051: break;
6052: case MAT_SYMMETRY_ETERNAL:
6053: *flg = mat->symmetry_eternal;
6054: break;
6055: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6056: *flg = mat->structural_symmetry_eternal;
6057: break;
6058: default:
6059: break;
6060: }
6061: PetscFunctionReturn(PETSC_SUCCESS);
6062: }
6064: /*@
6065: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6066: this routine retains the old nonzero structure.
6068: Logically Collective
6070: Input Parameter:
6071: . mat - the matrix
6073: Level: intermediate
6075: Note:
6076: If the matrix was not preallocated then a default, and likely poor, preallocation will be set in the matrix, so this should be called after the preallocation phase.
6077: See the Performance chapter of the users manual for information on preallocating matrices.
6079: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6080: @*/
6081: PetscErrorCode MatZeroEntries(Mat mat)
6082: {
6083: PetscFunctionBegin;
6086: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6087: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6088: MatCheckPreallocated(mat, 1);
6090: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6091: PetscUseTypeMethod(mat, zeroentries);
6092: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6093: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6094: PetscFunctionReturn(PETSC_SUCCESS);
6095: }
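/*
  Usage sketch for MatZeroEntries() above (illustrative only): reuse the nonzero structure of an
  already assembled matrix A (placeholder name) when rebuilding its values, e.g. once per time
  step or Newton iteration.

    PetscCall(MatZeroEntries(A)); // keep the sparsity pattern, zero the values
    // ... MatSetValues() calls to refill A ...
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/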
6097: /*@
6098: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6099: of a set of rows and columns of a matrix.
6101: Collective
6103: Input Parameters:
6104: + mat - the matrix
6105: . numRows - the number of rows/columns to zero
6106: . rows - the global row indices
6107: . diag - value put in the diagonal of the eliminated rows
6108: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6109: - b - optional vector of the right hand side, that will be adjusted by provided solution entries
6111: Level: intermediate
6113: Notes:
6114: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6116: For each zeroed row, the value of the corresponding `b` is set to diag times the value of the corresponding `x`.
6117: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6119: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6120: Krylov method to take advantage of the known solution on the zeroed rows.
6122: For the parallel case, all processes that share the matrix (i.e.,
6123: those in the communicator used for matrix creation) MUST call this
6124: routine, regardless of whether any rows being zeroed are owned by
6125: them.
6127: Unlike `MatZeroRows()` this does not change the nonzero structure of the matrix, it merely zeros those entries in the matrix.
6129: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6130: list only rows local to itself).
6132: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
6134: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6135: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6136: @*/
6137: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6138: {
6139: PetscFunctionBegin;
6142: if (numRows) PetscAssertPointer(rows, 3);
6143: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6144: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6145: MatCheckPreallocated(mat, 1);
6147: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6148: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6149: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6150: PetscFunctionReturn(PETSC_SUCCESS);
6151: }
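/*
  Usage sketch for MatZeroRowsColumns() above (illustrative only): impose Dirichlet boundary
  conditions on an assembled system A x = b. The array bcRows holds the global indices of the
  boundary rows, and the vector x already contains the prescribed boundary values; all names are
  placeholders and every process sharing A must make this call.

    PetscInt bcRows[] = {0, 7, 12};

    PetscCall(MatZeroRowsColumns(A, 3, bcRows, 1.0, x, b));
    // each listed row/column is zeroed, its diagonal set to 1.0, and b adjusted so the
    // solution of the modified system reproduces the prescribed values stored in x
*/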
6153: /*@
6154: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6155: of a set of rows and columns of a matrix.
6157: Collective
6159: Input Parameters:
6160: + mat - the matrix
6161: . is - the rows to zero
6162: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6163: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6164: - b - optional vector of right hand side, that will be adjusted by provided solution
6166: Level: intermediate
6168: Note:
6169: See `MatZeroRowsColumns()` for details on how this routine operates.
6171: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6172: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6173: @*/
6174: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6175: {
6176: PetscInt numRows;
6177: const PetscInt *rows;
6179: PetscFunctionBegin;
6184: PetscCall(ISGetLocalSize(is, &numRows));
6185: PetscCall(ISGetIndices(is, &rows));
6186: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6187: PetscCall(ISRestoreIndices(is, &rows));
6188: PetscFunctionReturn(PETSC_SUCCESS);
6189: }
6191: /*@
6192: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6193: of a set of rows of a matrix.
6195: Collective
6197: Input Parameters:
6198: + mat - the matrix
6199: . numRows - the number of rows to zero
6200: . rows - the global row indices
6201: . diag - value put in the diagonal of the zeroed rows
6202: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6203: - b - optional vector of right hand side, that will be adjusted by provided solution entries
6205: Level: intermediate
6207: Notes:
6208: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6210: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6212: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6213: Krylov method to take advantage of the known solution on the zeroed rows.
6215: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns
6216: from the matrix).
6218: Unlike `MatZeroRowsColumns()`, for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure from the eliminated rows of the matrix
6219: but does not release memory. Because of this removal, matrix-vector products with the adjusted matrix will be a bit faster. For the dense and block diagonal
6220: formats this does not alter the nonzero structure.
6222: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) is used, the nonzero structure
6223: of the matrix is not changed; the values are
6224: merely zeroed.
6226: The user can set a value in the diagonal entry (or for the `MATAIJ` format
6227: can optionally remove the main diagonal entry from the
6228: nonzero structure as well, by passing 0.0 as the `diag` value).
6230: For the parallel case, all processes that share the matrix (i.e.,
6231: those in the communicator used for matrix creation) MUST call this
6232: routine, regardless of whether any rows being zeroed are owned by
6233: them.
6235: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6236: list only rows local to itself).
6238: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6239: owns that are to be zeroed. This saves a global synchronization in the implementation.
6241: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6242: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`
6243: @*/
6244: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6245: {
6246: PetscFunctionBegin;
6249: if (numRows) PetscAssertPointer(rows, 3);
6250: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6251: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6252: MatCheckPreallocated(mat, 1);
6254: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6255: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6256: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6257: PetscFunctionReturn(PETSC_SUCCESS);
6258: }
6260: /*@
6261: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6262: of a set of rows of a matrix.
6264: Collective
6266: Input Parameters:
6267: + mat - the matrix
6268: . is - index set of rows to remove (if `NULL` then no row is removed)
6269: . diag - value put in all diagonals of eliminated rows
6270: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6271: - b - optional vector of right hand side, that will be adjusted by provided solution
6273: Level: intermediate
6275: Note:
6276: See `MatZeroRows()` for details on how this routine operates.
6278: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6279: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6280: @*/
6281: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6282: {
6283: PetscInt numRows = 0;
6284: const PetscInt *rows = NULL;
6286: PetscFunctionBegin;
6289: if (is) {
6291: PetscCall(ISGetLocalSize(is, &numRows));
6292: PetscCall(ISGetIndices(is, &rows));
6293: }
6294: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6295: if (is) PetscCall(ISRestoreIndices(is, &rows));
6296: PetscFunctionReturn(PETSC_SUCCESS);
6297: }
6299: /*@
6300: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6301: of a set of rows of a matrix. These rows must be local to the process.
6303: Collective
6305: Input Parameters:
6306: + mat - the matrix
6307: . numRows - the number of rows to remove
6308: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6309: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6310: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6311: - b - optional vector of right hand side, that will be adjusted by provided solution
6313: Level: intermediate
6315: Notes:
6316: See `MatZeroRows()` for details on how this routine operates.
6318: The grid coordinates are across the entire grid, not just the local portion
6320: For periodic boundary conditions use negative indices for values to the left (below 0; these are
6321: obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6322: etc., to obtain values obtained by wrapping from the left edge. This does not work for anything but the
6323: `DM_BOUNDARY_PERIODIC` boundary type.
6325: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6326: a single value per point) you can skip filling those indices.
6328: Fortran Note:
6329: `rows` should be declared as
6330: $ MatStencil rows(4, m)
6331: and the values inserted using
6332: .vb
6333: rows(MatStencil_i, 1) = i
6334: rows(MatStencil_j, 1) = j
6335: rows(MatStencil_k, 1) = k
6336: rows(MatStencil_c, 1) = c
6337: etc
6338: .ve
6340: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6341: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6342: @*/
6343: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6344: {
6345: PetscInt dim = mat->stencil.dim;
6346: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6347: PetscInt *dims = mat->stencil.dims + 1;
6348: PetscInt *starts = mat->stencil.starts;
6349: PetscInt *dxm = (PetscInt *)rows;
6350: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6352: PetscFunctionBegin;
6355: if (numRows) PetscAssertPointer(rows, 3);
6357: PetscCall(PetscMalloc1(numRows, &jdxm));
6358: for (i = 0; i < numRows; ++i) {
6359: /* Skip unused dimensions (they are ordered k, j, i, c) */
6360: for (j = 0; j < 3 - sdim; ++j) dxm++;
6361: /* Local index in X dir */
6362: tmp = *dxm++ - starts[0];
6363: /* Loop over remaining dimensions */
6364: for (j = 0; j < dim - 1; ++j) {
6365: /* If nonlocal, set index to be negative */
6366: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6367: /* Update local index */
6368: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6369: }
6370: /* Skip component slot if necessary */
6371: if (mat->stencil.noc) dxm++;
6372: /* Local row number */
6373: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6374: }
6375: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6376: PetscCall(PetscFree(jdxm));
6377: PetscFunctionReturn(PETSC_SUCCESS);
6378: }
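/*
  Usage sketch for MatZeroRowsStencil() above (illustrative only): zero one locally owned row of
  a matrix obtained from a 2d DMDA using grid coordinates instead of global indices. A is a
  placeholder for a matrix created with DMCreateMatrix() so that the stencil information is set.

    MatStencil row = {0};

    row.i = 3; // global grid index in x
    row.j = 5; // global grid index in y
    row.c = 0; // component (only needed when dof > 1)
    PetscCall(MatZeroRowsStencil(A, 1, &row, 1.0, NULL, NULL));
*/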
6380: /*@
6381: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6382: of a set of rows and columns of a matrix.
6384: Collective
6386: Input Parameters:
6387: + mat - the matrix
6388: . numRows - the number of rows/columns to remove
6389: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6390: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6391: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6392: - b - optional vector of right hand side, that will be adjusted by provided solution
6394: Level: intermediate
6396: Notes:
6397: See `MatZeroRowsColumns()` for details on how this routine operates.
6399: The grid coordinates are across the entire grid, not just the local portion
6401: For periodic boundary conditions use negative indices for values to the left (below 0; these are
6402: obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6403: etc., to obtain values obtained by wrapping from the left edge. This does not work for anything but the
6404: `DM_BOUNDARY_PERIODIC` boundary type.
6406: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6407: a single value per point) you can skip filling those indices.
6409: Fortran Note:
6410: `rows` should be declared as
6411: $ MatStencil rows(4, m)
6412: and the values inserted using
6413: .vb
6414: rows(MatStencil_i, 1) = i
6415: rows(MatStencil_j, 1) = j
6416: rows(MatStencil_k, 1) = k
6417: rows(MatStencil_c, 1) = c
6418: etc
6419: .ve
6421: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6422: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6423: @*/
6424: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6425: {
6426: PetscInt dim = mat->stencil.dim;
6427: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6428: PetscInt *dims = mat->stencil.dims + 1;
6429: PetscInt *starts = mat->stencil.starts;
6430: PetscInt *dxm = (PetscInt *)rows;
6431: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6433: PetscFunctionBegin;
6436: if (numRows) PetscAssertPointer(rows, 3);
6438: PetscCall(PetscMalloc1(numRows, &jdxm));
6439: for (i = 0; i < numRows; ++i) {
6440: /* Skip unused dimensions (they are ordered k, j, i, c) */
6441: for (j = 0; j < 3 - sdim; ++j) dxm++;
6442: /* Local index in X dir */
6443: tmp = *dxm++ - starts[0];
6444: /* Loop over remaining dimensions */
6445: for (j = 0; j < dim - 1; ++j) {
6446: /* If nonlocal, set index to be negative */
6447: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6448: /* Update local index */
6449: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6450: }
6451: /* Skip component slot if necessary */
6452: if (mat->stencil.noc) dxm++;
6453: /* Local row number */
6454: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6455: }
6456: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6457: PetscCall(PetscFree(jdxm));
6458: PetscFunctionReturn(PETSC_SUCCESS);
6459: }
6461: /*@C
6462: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6463: of a set of rows of a matrix; using local numbering of rows.
6465: Collective
6467: Input Parameters:
6468: + mat - the matrix
6469: . numRows - the number of rows to remove
6470: . rows - the local row indices
6471: . diag - value put in all diagonals of eliminated rows
6472: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6473: - b - optional vector of right hand side, that will be adjusted by provided solution
6475: Level: intermediate
6477: Notes:
6478: Before calling `MatZeroRowsLocal()`, the user must first set the
6479: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6481: See `MatZeroRows()` for details on how this routine operates.
6483: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6484: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6485: @*/
6486: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6487: {
6488: PetscFunctionBegin;
6491: if (numRows) PetscAssertPointer(rows, 3);
6492: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6493: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6494: MatCheckPreallocated(mat, 1);
6496: if (mat->ops->zerorowslocal) {
6497: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6498: } else {
6499: IS is, newis;
6500: const PetscInt *newRows;
6502: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6503: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6504: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6505: PetscCall(ISGetIndices(newis, &newRows));
6506: PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6507: PetscCall(ISRestoreIndices(newis, &newRows));
6508: PetscCall(ISDestroy(&newis));
6509: PetscCall(ISDestroy(&is));
6510: }
6511: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6512: PetscFunctionReturn(PETSC_SUCCESS);
6513: }
6515: /*@
6516: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6517: of a set of rows of a matrix; using local numbering of rows.
6519: Collective
6521: Input Parameters:
6522: + mat - the matrix
6523: . is - index set of rows to remove
6524: . diag - value put in all diagonals of eliminated rows
6525: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6526: - b - optional vector of right hand side, that will be adjusted by provided solution
6528: Level: intermediate
6530: Notes:
6531: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6532: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6534: See `MatZeroRows()` for details on how this routine operates.
6536: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6537: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6538: @*/
6539: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6540: {
6541: PetscInt numRows;
6542: const PetscInt *rows;
6544: PetscFunctionBegin;
6548: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6549: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6550: MatCheckPreallocated(mat, 1);
6552: PetscCall(ISGetLocalSize(is, &numRows));
6553: PetscCall(ISGetIndices(is, &rows));
6554: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6555: PetscCall(ISRestoreIndices(is, &rows));
6556: PetscFunctionReturn(PETSC_SUCCESS);
6557: }
6559: /*@
6560: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6561: of a set of rows and columns of a matrix; using local numbering of rows.
6563: Collective
6565: Input Parameters:
6566: + mat - the matrix
6567: . numRows - the number of rows to remove
6568: . rows - the local row indices
6569: . diag - value put in all diagonals of eliminated rows
6570: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6571: - b - optional vector of right hand side, that will be adjusted by provided solution
6573: Level: intermediate
6575: Notes:
6576: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6577: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6579: See `MatZeroRowsColumns()` for details on how this routine operates.
6581: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6582: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6583: @*/
6584: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6585: {
6586: IS is, newis;
6587: const PetscInt *newRows;
6589: PetscFunctionBegin;
6592: if (numRows) PetscAssertPointer(rows, 3);
6593: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6594: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6595: MatCheckPreallocated(mat, 1);
6597: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6598: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6599: PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6600: PetscCall(ISGetIndices(newis, &newRows));
6601: PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6602: PetscCall(ISRestoreIndices(newis, &newRows));
6603: PetscCall(ISDestroy(&newis));
6604: PetscCall(ISDestroy(&is));
6605: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6606: PetscFunctionReturn(PETSC_SUCCESS);
6607: }
6609: /*@
6610: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6611: of a set of rows and columns of a matrix; using local numbering of rows.
6613: Collective
6615: Input Parameters:
6616: + mat - the matrix
6617: . is - index set of rows to remove
6618: . diag - value put in all diagonals of eliminated rows
6619: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6620: - b - optional vector of right hand side, that will be adjusted by provided solution
6622: Level: intermediate
6624: Notes:
6625: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6626: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6628: See `MatZeroRowsColumns()` for details on how this routine operates.
6630: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6631: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6632: @*/
6633: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6634: {
6635: PetscInt numRows;
6636: const PetscInt *rows;
6638: PetscFunctionBegin;
6642: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6643: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6644: MatCheckPreallocated(mat, 1);
6646: PetscCall(ISGetLocalSize(is, &numRows));
6647: PetscCall(ISGetIndices(is, &rows));
6648: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6649: PetscCall(ISRestoreIndices(is, &rows));
6650: PetscFunctionReturn(PETSC_SUCCESS);
6651: }
6653: /*@C
6654: MatGetSize - Returns the numbers of rows and columns in a matrix.
6656: Not Collective
6658: Input Parameter:
6659: . mat - the matrix
6661: Output Parameters:
6662: + m - the number of global rows
6663: - n - the number of global columns
6665: Level: beginner
6667: Note:
6668: Both output parameters can be `NULL` on input.
6670: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6671: @*/
6672: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6673: {
6674: PetscFunctionBegin;
6676: if (m) *m = mat->rmap->N;
6677: if (n) *n = mat->cmap->N;
6678: PetscFunctionReturn(PETSC_SUCCESS);
6679: }
6681: /*@C
6682: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6683: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6685: Not Collective
6687: Input Parameter:
6688: . mat - the matrix
6690: Output Parameters:
6691: + m - the number of local rows, use `NULL` to not obtain this value
6692: - n - the number of local columns, use `NULL` to not obtain this value
6694: Level: beginner
6696: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6697: @*/
6698: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6699: {
6700: PetscFunctionBegin;
6702: if (m) PetscAssertPointer(m, 2);
6703: if (n) PetscAssertPointer(n, 3);
6704: if (m) *m = mat->rmap->n;
6705: if (n) *n = mat->cmap->n;
6706: PetscFunctionReturn(PETSC_SUCCESS);
6707: }
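/*
  Usage sketch for MatGetSize()/MatGetLocalSize() above (illustrative only): query the global and
  local dimensions of a matrix A (placeholder name).

    PetscInt M, N, m, n;

    PetscCall(MatGetSize(A, &M, &N));      // global number of rows and columns
    PetscCall(MatGetLocalSize(A, &m, &n)); // local sizes of the left and right vectors
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "global %" PetscInt_FMT " x %" PetscInt_FMT ", local %" PetscInt_FMT " x %" PetscInt_FMT "\n", M, N, m, n));
*/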
6709: /*@C
6710: MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with rows of a
6711: vector one multiplies this matrix by that are owned by this processor.
6713: Not Collective, unless matrix has not been allocated, then collective
6715: Input Parameter:
6716: . mat - the matrix
6718: Output Parameters:
6719: + m - the global index of the first local column, use `NULL` to not obtain this value
6720: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6722: Level: developer
6724: Note:
6725: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6726: Layouts](sec_matlayout) for details on matrix layouts.
6728: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
6729: @*/
6730: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6731: {
6732: PetscFunctionBegin;
6735: if (m) PetscAssertPointer(m, 2);
6736: if (n) PetscAssertPointer(n, 3);
6737: MatCheckPreallocated(mat, 1);
6738: if (m) *m = mat->cmap->rstart;
6739: if (n) *n = mat->cmap->rend;
6740: PetscFunctionReturn(PETSC_SUCCESS);
6741: }
6743: /*@C
6744: MatGetOwnershipRange - For matrices that own values by row, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6745: this MPI process.
6747: Not Collective
6749: Input Parameter:
6750: . mat - the matrix
6752: Output Parameters:
6753: + m - the global index of the first local row, use `NULL` to not obtain this value
6754: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6756: Level: beginner
6758: Note:
6759: For all matrices it returns the range of matrix rows associated with rows of a vector that
6760: would contain the result of a matrix vector product with this matrix. See [Matrix
6761: Layouts](sec_matlayout) for details on matrix layouts.
6763: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`,
6764: `PetscLayout`
6765: @*/
6766: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6767: {
6768: PetscFunctionBegin;
6771: if (m) PetscAssertPointer(m, 2);
6772: if (n) PetscAssertPointer(n, 3);
6773: MatCheckPreallocated(mat, 1);
6774: if (m) *m = mat->rmap->rstart;
6775: if (n) *n = mat->rmap->rend;
6776: PetscFunctionReturn(PETSC_SUCCESS);
6777: }
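/*
  Usage sketch for MatGetOwnershipRange() above (illustrative only): loop over the locally owned
  rows of a matrix A (placeholder name) and set its diagonal entries.

    PetscInt rstart, rend, i;

    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    for (i = rstart; i < rend; i++) PetscCall(MatSetValue(A, i, i, 1.0, INSERT_VALUES));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/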
6779: /*@C
6780: MatGetOwnershipRanges - For matrices that own values by row, excluding `MATELEMENTAL` and
6781: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6783: Not Collective, unless matrix has not been allocated
6785: Input Parameter:
6786: . mat - the matrix
6788: Output Parameter:
6789: . ranges - start of each process's portion, plus one more than the total length at the end
6791: Level: beginner
6793: Note:
6794: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6795: would contain the result of a matrix vector product with this matrix. See [Matrix
6796: Layouts](sec_matlayout) for details on matrix layouts.
6798: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
6799: @*/
6800: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt **ranges)
6801: {
6802: PetscFunctionBegin;
6805: MatCheckPreallocated(mat, 1);
6806: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6807: PetscFunctionReturn(PETSC_SUCCESS);
6808: }
6810: /*@C
6811: MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with rows of a
6812: vector one multiplies this matrix by that are owned by each process.
6814: Not Collective, unless matrix has not been allocated
6816: Input Parameter:
6817: . mat - the matrix
6819: Output Parameter:
6820: . ranges - start of each process's portion, plus one more than the total length at the end
6822: Level: beginner
6824: Note:
6825: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
6826: Layouts](sec_matlayout) for details on matrix layouts.
6828: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`
6829: @*/
6830: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt **ranges)
6831: {
6832: PetscFunctionBegin;
6835: MatCheckPreallocated(mat, 1);
6836: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
6837: PetscFunctionReturn(PETSC_SUCCESS);
6838: }
6840: /*@C
6841: MatGetOwnershipIS - Get row and column ownership of a matrix's values as index sets.
6843: Not Collective
6845: Input Parameter:
6846: . A - matrix
6848: Output Parameters:
6849: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
6850: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
6852: Level: intermediate
6854: Note:
6855: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
6856: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
6857: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
6858: details on matrix layouts.
6860: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
6861: @*/
6862: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
6863: {
6864: PetscErrorCode (*f)(Mat, IS *, IS *);
6866: PetscFunctionBegin;
6867: MatCheckPreallocated(A, 1);
6868: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
6869: if (f) {
6870: PetscCall((*f)(A, rows, cols));
6871: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
6872: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
6873: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
6874: }
6875: PetscFunctionReturn(PETSC_SUCCESS);
6876: }
6878: /*@C
6879: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`.
6880: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
6881: to complete the factorization.
6883: Collective
6885: Input Parameters:
6886: + fact - the factorized matrix obtained with `MatGetFactor()`
6887: . mat - the matrix
6888: . row - row permutation
6889: . col - column permutation
6890: - info - structure containing
6891: .vb
6892: levels - number of levels of fill.
6893: expected fill - as ratio of original fill.
6894: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
6895: missing diagonal entries)
6896: .ve
6898: Level: developer
6900: Notes:
6901: See [Matrix Factorization](sec_matfactor) for additional information.
6903: Most users should employ the `KSP` interface for linear solvers
6904: instead of working directly with matrix algebra routines such as this.
6905: See, e.g., `KSPCreate()`.
6907: Uses the definition of level of fill as in Y. Saad, 2003
6909: Developer Note:
6910: The Fortran interface is not autogenerated as the
6911: interface definition cannot be generated correctly [due to `MatFactorInfo`]
6913: References:
6914: . * - Y. Saad, Iterative methods for sparse linear systems Philadelphia: Society for Industrial and Applied Mathematics, 2003
6916: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
6917: `MatGetOrdering()`, `MatFactorInfo`
6918: @*/
6919: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
6920: {
6921: PetscFunctionBegin;
6926: PetscAssertPointer(info, 5);
6927: PetscAssertPointer(fact, 1);
6928: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
6929: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
6930: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6931: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6932: MatCheckPreallocated(mat, 2);
6934: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
6935: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
6936: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
6937: PetscFunctionReturn(PETSC_SUCCESS);
6938: }
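/*
  A minimal sketch of the ILU workflow this routine is part of (illustrative only; most users
  should use KSP/PC instead). A is assumed to be an assembled MATSEQAIJ matrix and b, x
  compatible vectors.

    Mat           F;
    IS            row, col;
    MatFactorInfo info;

    PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ILU, &F));
    PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &row, &col));
    PetscCall(MatFactorInfoInitialize(&info));
    info.levels = 1; // ILU(1)
    info.fill   = 1.0;
    PetscCall(MatILUFactorSymbolic(F, A, row, col, &info));
    PetscCall(MatLUFactorNumeric(F, A, &info));
    PetscCall(MatSolve(F, b, x));
    PetscCall(ISDestroy(&row));
    PetscCall(ISDestroy(&col));
    PetscCall(MatDestroy(&F));
*/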
6940: /*@C
6941: MatICCFactorSymbolic - Performs symbolic incomplete
6942: Cholesky factorization for a symmetric matrix. Use
6943: `MatCholeskyFactorNumeric()` to complete the factorization.
6945: Collective
6947: Input Parameters:
6948: + fact - the factorized matrix obtained with `MatGetFactor()`
6949: . mat - the matrix to be factored
6950: . perm - row and column permutation
6951: - info - structure containing
6952: .vb
6953: levels - number of levels of fill.
6954: expected fill - as ratio of original fill.
6955: .ve
6957: Level: developer
6959: Notes:
6960: Most users should employ the `KSP` interface for linear solvers
6961: instead of working directly with matrix algebra routines such as this.
6962: See, e.g., `KSPCreate()`.
6964: This uses the definition of level of fill as in Y. Saad, 2003
6966: Developer Note:
6967: The Fortran interface is not autogenerated as the
6968: interface definition cannot be generated correctly [due to `MatFactorInfo`]
6970: References:
6971: . * - Y. Saad, Iterative methods for sparse linear systems Philadelphia: Society for Industrial and Applied Mathematics, 2003
6973: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
6974: @*/
6975: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
6976: {
6977: PetscFunctionBegin;
6981: PetscAssertPointer(info, 4);
6982: PetscAssertPointer(fact, 1);
6983: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6984: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
6985: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
6986: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6987: MatCheckPreallocated(mat, 2);
6989: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
6990: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
6991: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
6992: PetscFunctionReturn(PETSC_SUCCESS);
6993: }
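/*
  A minimal sketch of the corresponding ICC workflow (illustrative only; most users should use
  KSP/PC instead). A is assumed to be an assembled symmetric MATSEQAIJ or MATSEQSBAIJ matrix
  and b, x compatible vectors.

    Mat           F;
    IS            rperm, cperm;
    MatFactorInfo info;

    PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ICC, &F));
    PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rperm, &cperm));
    PetscCall(MatFactorInfoInitialize(&info));
    info.levels = 0; // ICC(0)
    info.fill   = 1.0;
    PetscCall(MatICCFactorSymbolic(F, A, rperm, &info));
    PetscCall(MatCholeskyFactorNumeric(F, A, &info));
    PetscCall(MatSolve(F, b, x));
    PetscCall(ISDestroy(&rperm));
    PetscCall(ISDestroy(&cperm));
    PetscCall(MatDestroy(&F));
*/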
6995: /*@C
6996: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
6997: points to an array of valid matrices, they may be reused to store the new
6998: submatrices.
7000: Collective
7002: Input Parameters:
7003: + mat - the matrix
7004: . n - the number of submatrices to be extracted (on this processor, may be zero)
7005: . irow - index set of rows to extract
7006: . icol - index set of columns to extract
7007: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7009: Output Parameter:
7010: . submat - the array of submatrices
7012: Level: advanced
7014: Notes:
7015: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7016: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7017: to extract a parallel submatrix.
7019: Some matrix types place restrictions on the row and column
7020: indices, such as that they be sorted or that they be equal to each other.
7022: The index sets may not have duplicate entries.
7024: When extracting submatrices from a parallel matrix, each processor can
7025: form a different submatrix by setting the rows and columns of its
7026: individual index sets according to the local submatrix desired.
7028: When finished using the submatrices, the user should destroy
7029: them with `MatDestroySubMatrices()`.
7031: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7032: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7034: This routine creates the matrices in submat; you should NOT create them before
7035: calling it. It also allocates the array of matrix pointers submat.
7037: For `MATBAIJ` matrices the index sets must respect the block structure, that is, if they
7038: request one row/column in a block, they must request all rows/columns that are in
7039: that block. For example, if the block size is 2 you cannot request just row 0 and
7040: column 0.
7042: Fortran Note:
7043: The Fortran interface is slightly different from that given below; it
7044: requires one to pass in as `submat` a `Mat` (integer) array of size at least n+1.
7046: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7047: @*/
7048: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7049: {
7050: PetscInt i;
7051: PetscBool eq;
7053: PetscFunctionBegin;
7056: if (n) {
7057: PetscAssertPointer(irow, 3);
7059: PetscAssertPointer(icol, 4);
7061: }
7062: PetscAssertPointer(submat, 6);
7063: if (n && scall == MAT_REUSE_MATRIX) {
7064: PetscAssertPointer(*submat, 6);
7066: }
7067: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7068: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7069: MatCheckPreallocated(mat, 1);
7070: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7071: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7072: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7073: for (i = 0; i < n; i++) {
7074: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7075: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7076: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7077: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7078: if (mat->boundtocpu && mat->bindingpropagates) {
7079: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7080: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7081: }
7082: #endif
7083: }
7084: PetscFunctionReturn(PETSC_SUCCESS);
7085: }
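/*
  A minimal usage sketch (illustrative only): each process extracts one sequential submatrix
  made of its own rows and the first ncols columns of a parallel matrix A; ncols and the other
  names are hypothetical.

    IS       isrow, iscol;
    Mat     *submat;
    PetscInt rstart, rend;

    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &isrow));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, ncols, 0, 1, &iscol));
    PetscCall(MatCreateSubMatrices(A, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &submat));
    // ... work with the sequential matrix submat[0] ...
    PetscCall(MatDestroySubMatrices(1, &submat));
    PetscCall(ISDestroy(&isrow));
    PetscCall(ISDestroy(&iscol));
*/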
7087: /*@C
7088: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of mat (by pairs of `IS` that may live on subcomms).
7090: Collective
7092: Input Parameters:
7093: + mat - the matrix
7094: . n - the number of submatrices to be extracted
7095: . irow - index set of rows to extract
7096: . icol - index set of columns to extract
7097: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7099: Output Parameter:
7100: . submat - the array of submatrices
7102: Level: advanced
7104: Note:
7105: This is used by `PCGASM`
7107: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7108: @*/
7109: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7110: {
7111: PetscInt i;
7112: PetscBool eq;
7114: PetscFunctionBegin;
7117: if (n) {
7118: PetscAssertPointer(irow, 3);
7120: PetscAssertPointer(icol, 4);
7122: }
7123: PetscAssertPointer(submat, 6);
7124: if (n && scall == MAT_REUSE_MATRIX) {
7125: PetscAssertPointer(*submat, 6);
7127: }
7128: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7129: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7130: MatCheckPreallocated(mat, 1);
7132: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7133: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7134: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7135: for (i = 0; i < n; i++) {
7136: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7137: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7138: }
7139: PetscFunctionReturn(PETSC_SUCCESS);
7140: }
7142: /*@C
7143: MatDestroyMatrices - Destroys an array of matrices.
7145: Collective
7147: Input Parameters:
7148: + n - the number of local matrices
7149: - mat - the matrices (this is a pointer to the array of matrices)
7151: Level: advanced
7153: Note:
7154: Frees not only the matrices, but also the array that contains the matrices
7156: Fortran Note:
7157: This does not free the array.
7159: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7160: @*/
7161: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7162: {
7163: PetscInt i;
7165: PetscFunctionBegin;
7166: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7167: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7168: PetscAssertPointer(mat, 2);
7170: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7172: /* memory is allocated even if n = 0 */
7173: PetscCall(PetscFree(*mat));
7174: PetscFunctionReturn(PETSC_SUCCESS);
7175: }
7177: /*@C
7178: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7180: Collective
7182: Input Parameters:
7183: + n - the number of local matrices
7184: - mat - the matrices (this is a pointer to the array of matrices, just to match the calling
7185: sequence of `MatCreateSubMatrices()`)
7187: Level: advanced
7189: Note:
7190: Frees not only the matrices, but also the array that contains the matrices
7192: Fortran Note:
7193: This does not free the array.
7195: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7196: @*/
7197: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7198: {
7199: Mat mat0;
7201: PetscFunctionBegin;
7202: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7203: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7204: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7205: PetscAssertPointer(mat, 2);
7207: mat0 = (*mat)[0];
7208: if (mat0 && mat0->ops->destroysubmatrices) {
7209: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7210: } else {
7211: PetscCall(MatDestroyMatrices(n, mat));
7212: }
7213: PetscFunctionReturn(PETSC_SUCCESS);
7214: }
7216: /*@C
7217: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7219: Collective
7221: Input Parameter:
7222: . mat - the matrix
7224: Output Parameter:
7225: . matstruct - the sequential matrix with the nonzero structure of mat
7227: Level: developer
7229: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7230: @*/
7231: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7232: {
7233: PetscFunctionBegin;
7235: PetscAssertPointer(matstruct, 2);
7238: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7239: MatCheckPreallocated(mat, 1);
7241: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7242: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7243: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7244: PetscFunctionReturn(PETSC_SUCCESS);
7245: }
7247: /*@C
7248: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7250: Collective
7252: Input Parameter:
7253: . mat - the matrix (this is a pointer to the matrix, just to match the calling
7254: sequence of `MatGetSeqNonzeroStructure()`)
7256: Level: advanced
7258: Note:
7259: Destroys the sequential matrix that was created by `MatGetSeqNonzeroStructure()`
7261: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7262: @*/
7263: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7264: {
7265: PetscFunctionBegin;
7266: PetscAssertPointer(mat, 1);
7267: PetscCall(MatDestroy(mat));
7268: PetscFunctionReturn(PETSC_SUCCESS);
7269: }
7271: /*@
7272: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7273: replaces the index sets by larger ones that represent submatrices with
7274: additional overlap.
7276: Collective
7278: Input Parameters:
7279: + mat - the matrix
7280: . n - the number of index sets
7281: . is - the array of index sets (these index sets will be changed during the call)
7282: - ov - the additional overlap requested
7284: Options Database Key:
7285: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7287: Level: developer
7289: Note:
7290: The computed overlap preserves the matrix block sizes when the blocks are square.
7291: That is, if a matrix nonzero for a given block would increase the overlap, all columns associated with
7292: that block are included in the overlap, regardless of whether each specific column would increase the overlap.
7294: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7295: @*/
7296: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7297: {
7298: PetscInt i, bs, cbs;
7300: PetscFunctionBegin;
7304: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7305: if (n) {
7306: PetscAssertPointer(is, 3);
7308: }
7309: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7310: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7311: MatCheckPreallocated(mat, 1);
7313: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7314: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7315: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7316: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7317: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7318: if (bs == cbs) {
7319: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7320: }
7321: PetscFunctionReturn(PETSC_SUCCESS);
7322: }
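/*
  A minimal usage sketch (illustrative only), in the spirit of what PCASM does: start from the
  locally owned rows of an assembled parallel matrix A and grow that index set by one level of
  overlap.

    IS       is;
    PetscInt rstart, rend;

    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &is));
    PetscCall(MatIncreaseOverlap(A, 1, &is, 1)); // is now also contains the neighboring rows
    // ... extract the overlapping submatrix with MatCreateSubMatrices() ...
    PetscCall(ISDestroy(&is));
*/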
7324: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7326: /*@
7327: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7328: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7329: additional overlap.
7331: Collective
7333: Input Parameters:
7334: + mat - the matrix
7335: . n - the number of index sets
7336: . is - the array of index sets (these index sets will be changed during the call)
7337: - ov - the additional overlap requested
7339: Options Database Key:
7340: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7342: Level: developer
7344: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7345: @*/
7346: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7347: {
7348: PetscInt i;
7350: PetscFunctionBegin;
7353: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7354: if (n) {
7355: PetscAssertPointer(is, 3);
7357: }
7358: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7359: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7360: MatCheckPreallocated(mat, 1);
7361: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7362: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7363: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7364: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7365: PetscFunctionReturn(PETSC_SUCCESS);
7366: }
7368: /*@
7369: MatGetBlockSize - Returns the matrix block size.
7371: Not Collective
7373: Input Parameter:
7374: . mat - the matrix
7376: Output Parameter:
7377: . bs - block size
7379: Level: intermediate
7381: Notes:
7382: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7384: If the block size has not been set yet this routine returns 1.
7386: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7387: @*/
7388: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7389: {
7390: PetscFunctionBegin;
7392: PetscAssertPointer(bs, 2);
7393: *bs = PetscAbs(mat->rmap->bs);
7394: PetscFunctionReturn(PETSC_SUCCESS);
7395: }
7397: /*@
7398: MatGetBlockSizes - Returns the matrix block row and column sizes.
7400: Not Collective
7402: Input Parameter:
7403: . mat - the matrix
7405: Output Parameters:
7406: + rbs - row block size
7407: - cbs - column block size
7409: Level: intermediate
7411: Notes:
7412: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7413: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7415: If a block size has not been set yet this routine returns 1.
7417: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7418: @*/
7419: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7420: {
7421: PetscFunctionBegin;
7423: if (rbs) PetscAssertPointer(rbs, 2);
7424: if (cbs) PetscAssertPointer(cbs, 3);
7425: if (rbs) *rbs = PetscAbs(mat->rmap->bs);
7426: if (cbs) *cbs = PetscAbs(mat->cmap->bs);
7427: PetscFunctionReturn(PETSC_SUCCESS);
7428: }
7430: /*@
7431: MatSetBlockSize - Sets the matrix block size.
7433: Logically Collective
7435: Input Parameters:
7436: + mat - the matrix
7437: - bs - block size
7439: Level: intermediate
7441: Notes:
7442: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7443: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (otherwise the block size defaults to 1), and the block size cannot be changed later.
7445: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7446: is compatible with the matrix local sizes.
7448: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7449: @*/
7450: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7451: {
7452: PetscFunctionBegin;
7455: PetscCall(MatSetBlockSizes(mat, bs, bs));
7456: PetscFunctionReturn(PETSC_SUCCESS);
7457: }
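/*
  A minimal usage sketch (illustrative only): set a block size of 3 on a sequential BAIJ matrix
  before it is set up, then read it back.

    Mat      A;
    PetscInt bs;

    PetscCall(MatCreate(PETSC_COMM_SELF, &A));
    PetscCall(MatSetSizes(A, 12, 12, 12, 12));
    PetscCall(MatSetType(A, MATBAIJ));
    PetscCall(MatSetBlockSize(A, 3));
    PetscCall(MatSetUp(A));
    PetscCall(MatGetBlockSize(A, &bs)); // bs is now 3
    PetscCall(MatDestroy(&A));
*/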
7459: typedef struct {
7460: PetscInt n;
7461: IS *is;
7462: Mat *mat;
7463: PetscObjectState nonzerostate;
7464: Mat C;
7465: } EnvelopeData;
7467: static PetscErrorCode EnvelopeDataDestroy(EnvelopeData *edata)
7468: {
7469: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7470: PetscCall(PetscFree(edata->is));
7471: PetscCall(PetscFree(edata));
7472: return PETSC_SUCCESS;
7473: }
7475: /*@
7476: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal, this computes and stores
7477: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7479: Collective
7481: Input Parameter:
7482: . mat - the matrix
7484: Level: intermediate
7486: Notes:
7487: There can be zeros within the blocks
7489: The blocks can overlap between processes, including lying on more than two processes
7491: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7492: @*/
7493: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7494: {
7495: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7496: PetscInt *diag, *odiag, sc;
7497: VecScatter scatter;
7498: PetscScalar *seqv;
7499: const PetscScalar *parv;
7500: const PetscInt *ia, *ja;
7501: PetscBool set, flag, done;
7502: Mat AA = mat, A;
7503: MPI_Comm comm;
7504: PetscMPIInt rank, size, tag;
7505: MPI_Status status;
7506: PetscContainer container;
7507: EnvelopeData *edata;
7508: Vec seq, par;
7509: IS isglobal;
7511: PetscFunctionBegin;
7513: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7514: if (!set || !flag) {
7515: /* TODO: only needs nonzero structure of transpose */
7516: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7517: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7518: }
7519: PetscCall(MatAIJGetLocalMat(AA, &A));
7520: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7521: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7523: PetscCall(MatGetLocalSize(mat, &n, NULL));
7524: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7525: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7526: PetscCallMPI(MPI_Comm_size(comm, &size));
7527: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7529: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7531: if (rank > 0) {
7532: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7533: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7534: }
7535: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7536: for (i = 0; i < n; i++) {
7537: env = PetscMax(env, ja[ia[i + 1] - 1]);
7538: II = rstart + i;
7539: if (env == II) {
7540: starts[lblocks] = tbs;
7541: sizes[lblocks++] = 1 + II - tbs;
7542: tbs = 1 + II;
7543: }
7544: }
7545: if (rank < size - 1) {
7546: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7547: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7548: }
7550: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7551: if (!set || !flag) PetscCall(MatDestroy(&AA));
7552: PetscCall(MatDestroy(&A));
7554: PetscCall(PetscNew(&edata));
7555: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7556: edata->n = lblocks;
7557: /* create IS needed for extracting blocks from the original matrix */
7558: PetscCall(PetscMalloc1(lblocks, &edata->is));
7559: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7561: /* Create the resulting inverse matrix structure with preallocation information */
7562: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7563: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7564: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7565: PetscCall(MatSetType(edata->C, MATAIJ));
7567: /* Communicate the start and end of each row, from each block to the correct rank */
7568: /* TODO: Use PetscSF instead of VecScatter */
7569: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7570: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7571: PetscCall(VecGetArrayWrite(seq, &seqv));
7572: for (PetscInt i = 0; i < lblocks; i++) {
7573: for (PetscInt j = 0; j < sizes[i]; j++) {
7574: seqv[cnt] = starts[i];
7575: seqv[cnt + 1] = starts[i] + sizes[i];
7576: cnt += 2;
7577: }
7578: }
7579: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7580: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7581: sc -= cnt;
7582: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7583: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7584: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7585: PetscCall(ISDestroy(&isglobal));
7586: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7587: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7588: PetscCall(VecScatterDestroy(&scatter));
7589: PetscCall(VecDestroy(&seq));
7590: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7591: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7592: PetscCall(VecGetArrayRead(par, &parv));
7593: cnt = 0;
7594: PetscCall(MatGetSize(mat, NULL, &n));
7595: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7596: PetscInt start, end, d = 0, od = 0;
7598: start = (PetscInt)PetscRealPart(parv[cnt]);
7599: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7600: cnt += 2;
7602: if (start < cstart) {
7603: od += cstart - start + n - cend;
7604: d += cend - cstart;
7605: } else if (start < cend) {
7606: od += n - cend;
7607: d += cend - start;
7608: } else od += n - start;
7609: if (end <= cstart) {
7610: od -= cstart - end + n - cend;
7611: d -= cend - cstart;
7612: } else if (end < cend) {
7613: od -= n - cend;
7614: d -= cend - end;
7615: } else od -= n - end;
7617: odiag[i] = od;
7618: diag[i] = d;
7619: }
7620: PetscCall(VecRestoreArrayRead(par, &parv));
7621: PetscCall(VecDestroy(&par));
7622: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7623: PetscCall(PetscFree2(diag, odiag));
7624: PetscCall(PetscFree2(sizes, starts));
7626: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7627: PetscCall(PetscContainerSetPointer(container, edata));
7628: PetscCall(PetscContainerSetUserDestroy(container, (PetscErrorCode(*)(void *))EnvelopeDataDestroy));
7629: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7630: PetscCall(PetscObjectDereference((PetscObject)container));
7631: PetscFunctionReturn(PETSC_SUCCESS);
7632: }
7634: /*@
7635: MatInvertVariableBlockEnvelope - sets matrix `C` to be the inverted block diagonal of matrix `A`
7637: Collective
7639: Input Parameters:
7640: + A - the matrix
7641: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7643: Output Parameter:
7644: . C - matrix with inverted block diagonal of `A`
7646: Level: advanced
7648: Note:
7649: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7651: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7652: @*/
7653: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7654: {
7655: PetscContainer container;
7656: EnvelopeData *edata;
7657: PetscObjectState nonzerostate;
7659: PetscFunctionBegin;
7660: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7661: if (!container) {
7662: PetscCall(MatComputeVariableBlockEnvelope(A));
7663: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7664: }
7665: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7666: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7667: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7668: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7670: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7671: *C = edata->C;
7673: for (PetscInt i = 0; i < edata->n; i++) {
7674: Mat D;
7675: PetscScalar *dvalues;
7677: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7678: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7679: PetscCall(MatSeqDenseInvert(D));
7680: PetscCall(MatDenseGetArray(D, &dvalues));
7681: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7682: PetscCall(MatDestroy(&D));
7683: }
7684: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7685: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7686: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7687: PetscFunctionReturn(PETSC_SUCCESS);
7688: }
7690: /*@
7691: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7693: Logically Collective
7695: Input Parameters:
7696: + mat - the matrix
7697: . nblocks - the number of blocks on this process; each block can only exist on a single process
7698: - bsizes - the block sizes
7700: Level: intermediate
7702: Notes:
7703: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7705: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
7707: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7708: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7709: @*/
7710: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, PetscInt *bsizes)
7711: {
7712: PetscInt i, ncnt = 0, nlocal;
7714: PetscFunctionBegin;
7716: PetscCheck(nblocks >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks must be greater than or equal to zero");
7717: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7718: for (i = 0; i < nblocks; i++) ncnt += bsizes[i];
7719: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7720: PetscCall(PetscFree(mat->bsizes));
7721: mat->nblocks = nblocks;
7722: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7723: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7724: PetscFunctionReturn(PETSC_SUCCESS);
7725: }
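/*
  A minimal usage sketch (illustrative only), e.g. in preparation for PCVPBJACOBI: register
  variable point-block sizes whose sum equals the local number of rows of A, here assumed to
  be 7.

    PetscInt bsizes[3] = {2, 3, 2}; // 2 + 3 + 2 == 7 == local number of rows

    PetscCall(MatSetVariableBlockSizes(A, 3, bsizes));
*/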
7727: /*@C
7728: MatGetVariableBlockSizes - Gets the diagonal point-blocks of the matrix, which need not be of the same size
7730: Logically Collective; No Fortran Support
7732: Input Parameter:
7733: . mat - the matrix
7735: Output Parameters:
7736: + nblocks - the number of blocks on this process
7737: - bsizes - the block sizes
7739: Level: intermediate
7741: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7742: @*/
7743: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt **bsizes)
7744: {
7745: PetscFunctionBegin;
7747: *nblocks = mat->nblocks;
7748: *bsizes = mat->bsizes;
7749: PetscFunctionReturn(PETSC_SUCCESS);
7750: }
7752: /*@
7753: MatSetBlockSizes - Sets the matrix block row and column sizes.
7755: Logically Collective
7757: Input Parameters:
7758: + mat - the matrix
7759: . rbs - row block size
7760: - cbs - column block size
7762: Level: intermediate
7764: Notes:
7765: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7766: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7767: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (otherwise the block sizes default to 1), and the block sizes cannot be changed later.
7769: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
7770: are compatible with the matrix local sizes.
7772: The row and column block sizes determine the block sizes of the "row" and "column" vectors returned by `MatCreateVecs()`.
7774: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7775: @*/
7776: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
7777: {
7778: PetscFunctionBegin;
7782: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
7783: if (mat->rmap->refcnt) {
7784: ISLocalToGlobalMapping l2g = NULL;
7785: PetscLayout nmap = NULL;
7787: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
7788: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
7789: PetscCall(PetscLayoutDestroy(&mat->rmap));
7790: mat->rmap = nmap;
7791: mat->rmap->mapping = l2g;
7792: }
7793: if (mat->cmap->refcnt) {
7794: ISLocalToGlobalMapping l2g = NULL;
7795: PetscLayout nmap = NULL;
7797: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
7798: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
7799: PetscCall(PetscLayoutDestroy(&mat->cmap));
7800: mat->cmap = nmap;
7801: mat->cmap->mapping = l2g;
7802: }
7803: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
7804: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
7805: PetscFunctionReturn(PETSC_SUCCESS);
7806: }
7808: /*@
7809: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7811: Logically Collective
7813: Input Parameters:
7814: + mat - the matrix
7815: . fromRow - matrix from which to copy row block size
7816: - fromCol - matrix from which to copy column block size (can be same as fromRow)
7818: Level: developer
7820: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
7821: @*/
7822: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
7823: {
7824: PetscFunctionBegin;
7828: if (fromRow->rmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
7829: if (fromCol->cmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
7830: PetscFunctionReturn(PETSC_SUCCESS);
7831: }
7833: /*@
7834: MatResidual - Default routine to calculate the residual r = b - Ax
7836: Collective
7838: Input Parameters:
7839: + mat - the matrix
7840: . b - the right-hand-side
7841: - x - the approximate solution
7843: Output Parameter:
7844: . r - location to store the residual
7846: Level: developer
7848: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
7849: @*/
7850: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
7851: {
7852: PetscFunctionBegin;
7858: MatCheckPreallocated(mat, 1);
7859: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
7860: if (!mat->ops->residual) {
7861: PetscCall(MatMult(mat, x, r));
7862: PetscCall(VecAYPX(r, -1.0, b));
7863: } else {
7864: PetscUseTypeMethod(mat, residual, b, x, r);
7865: }
7866: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
7867: PetscFunctionReturn(PETSC_SUCCESS);
7868: }
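/*
  A minimal usage sketch (illustrative only): compute the residual norm ||b - A x|| for an
  assembled matrix A and compatible vectors b and x.

    Vec       r;
    PetscReal norm;

    PetscCall(MatCreateVecs(A, NULL, &r)); // r lives in the range space of A
    PetscCall(MatResidual(A, b, x, r));
    PetscCall(VecNorm(r, NORM_2, &norm));
    PetscCall(VecDestroy(&r));
*/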
7870: /*MC
7871: MatGetRowIJF90 - Obtains the compressed row storage i and j indices for the local rows of a sparse matrix
7873: Synopsis:
7874: MatGetRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
7876: Not Collective
7878: Input Parameters:
7879: + A - the matrix
7880: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7881: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7882: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7883: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7884: always used.
7886: Output Parameters:
7887: + n - number of local rows in the (possibly compressed) matrix
7888: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7889: . ja - the column indices
7890: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7891: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7893: Level: developer
7895: Note:
7896: Use `MatRestoreRowIJF90()` when you no longer need access to the data
7898: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatRestoreRowIJF90()`
7899: M*/
7901: /*MC
7902: MatRestoreRowIJF90 - restores the compressed row storage i and j indices for the local rows of a sparse matrix obtained with `MatGetRowIJF90()`
7904: Synopsis:
7905: MatRestoreRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
7907: Not Collective
7909: Input Parameters:
7910: + A - the matrix
7911: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7912: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7913: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7914: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7915: always used.
7916: . n - number of local rows in the (possibly compressed) matrix
7917: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7918: . ja - the column indices
7919: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7920: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7922: Level: developer
7924: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatGetRowIJF90()`
7925: M*/
7927: /*@C
7928: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
7930: Collective
7932: Input Parameters:
7933: + mat - the matrix
7934: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7935: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7936: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7937: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7938: always used.
7940: Output Parameters:
7941: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
7942: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
7943: . ja - the column indices, use `NULL` if not needed
7944: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7945: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7947: Level: developer
7949: Notes:
7950: You CANNOT change any of the ia[] or ja[] values.
7952: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
7954: Fortran Notes:
7955: Use
7956: .vb
7957: PetscInt, pointer :: ia(:),ja(:)
7958: call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
7959: ! Access the ith and jth entries via ia(i) and ja(j)
7960: .ve
7962: `MatGetRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatGetRowIJF90()`
7964: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetRowIJF90()`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
7965: @*/
7966: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
7967: {
7968: PetscFunctionBegin;
7971: if (n) PetscAssertPointer(n, 5);
7972: if (ia) PetscAssertPointer(ia, 6);
7973: if (ja) PetscAssertPointer(ja, 7);
7974: if (done) PetscAssertPointer(done, 8);
7975: MatCheckPreallocated(mat, 1);
7976: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
7977: else {
7978: if (done) *done = PETSC_TRUE;
7979: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
7980: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
7981: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
7982: }
7983: PetscFunctionReturn(PETSC_SUCCESS);
7984: }
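/*
  A minimal usage sketch (illustrative only): inspect the compressed row structure of a
  sequential AIJ matrix A using 0-based indices, then restore the arrays.

    const PetscInt *ia, *ja;
    PetscInt        n;
    PetscBool       done;

    PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
    if (done) {
      for (PetscInt i = 0; i < n; i++) {
        // row i has ia[i + 1] - ia[i] nonzeros, with column indices ja[ia[i]] .. ja[ia[i + 1] - 1]
      }
    }
    PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
*/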
7986: /*@C
7987: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
7989: Collective
7991: Input Parameters:
7992: + mat - the matrix
7993: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7994: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
7995: symmetrized
7996: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7997: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7998: always used.
7999: Output Parameters:
8000: + n - number of columns in the (possibly compressed) matrix
8001: . ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
8003: . ja - the row indices
8004: - done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8006: Level: developer
8008: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8009: @*/
8010: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8011: {
8012: PetscFunctionBegin;
8015: PetscAssertPointer(n, 5);
8016: if (ia) PetscAssertPointer(ia, 6);
8017: if (ja) PetscAssertPointer(ja, 7);
8018: PetscAssertPointer(done, 8);
8019: MatCheckPreallocated(mat, 1);
8020: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8021: else {
8022: *done = PETSC_TRUE;
8023: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8024: }
8025: PetscFunctionReturn(PETSC_SUCCESS);
8026: }
8028: /*@C
8029: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8031: Collective
8033: Input Parameters:
8034: + mat - the matrix
8035: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8036: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8037: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8038: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8039: always used.
8040: . n - size of (possibly compressed) matrix
8041: . ia - the row pointers
8042: - ja - the column indices
8044: Output Parameter:
8045: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been restored
8047: Level: developer
8049: Note:
8050: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8051: use of the arrays after they have been restored. If you pass `NULL`, it will
8052: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8054: Fortran Note:
8055: `MatRestoreRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatRestoreRowIJF90()`
8057: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreRowIJF90()`, `MatRestoreColumnIJ()`
8058: @*/
8059: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8060: {
8061: PetscFunctionBegin;
8064: if (ia) PetscAssertPointer(ia, 6);
8065: if (ja) PetscAssertPointer(ja, 7);
8066: if (done) PetscAssertPointer(done, 8);
8067: MatCheckPreallocated(mat, 1);
8069: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8070: else {
8071: if (done) *done = PETSC_TRUE;
8072: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8073: if (n) *n = 0;
8074: if (ia) *ia = NULL;
8075: if (ja) *ja = NULL;
8076: }
8077: PetscFunctionReturn(PETSC_SUCCESS);
8078: }
8080: /*@C
8081: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8083: Collective
8085: Input Parameters:
8086: + mat - the matrix
8087: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8088: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8089: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8090: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8091: always used.
8093: Output Parameters:
8094: + n - size of (possibly compressed) matrix
8095: . ia - the column pointers
8096: . ja - the row indices
8097: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been restored
8099: Level: developer
8101: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8102: @*/
8103: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8104: {
8105: PetscFunctionBegin;
8108: if (ia) PetscAssertPointer(ia, 6);
8109: if (ja) PetscAssertPointer(ja, 7);
8110: PetscAssertPointer(done, 8);
8111: MatCheckPreallocated(mat, 1);
8113: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8114: else {
8115: *done = PETSC_TRUE;
8116: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8117: if (n) *n = 0;
8118: if (ia) *ia = NULL;
8119: if (ja) *ja = NULL;
8120: }
8121: PetscFunctionReturn(PETSC_SUCCESS);
8122: }
8124: /*@C
8125: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8126: `MatGetColumnIJ()`.
8128: Collective
8130: Input Parameters:
8131: + mat - the matrix
8132: . ncolors - maximum color value
8133: . n - number of entries in colorarray
8134: - colorarray - array indicating color for each column
8136: Output Parameter:
8137: . iscoloring - coloring generated using colorarray information
8139: Level: developer
8141: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8142: @*/
8143: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8144: {
8145: PetscFunctionBegin;
8148: PetscAssertPointer(colorarray, 4);
8149: PetscAssertPointer(iscoloring, 5);
8150: MatCheckPreallocated(mat, 1);
8152: if (!mat->ops->coloringpatch) {
8153: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8154: } else {
8155: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8156: }
8157: PetscFunctionReturn(PETSC_SUCCESS);
8158: }
8160: /*@
8161: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8163: Logically Collective
8165: Input Parameter:
8166: . mat - the factored matrix to be reset
8168: Level: developer
8170: Notes:
8171: This routine should be used only with factored matrices formed by in-place
8172: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8173: format). This option can save memory, for example, when solving nonlinear
8174: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8175: ILU(0) preconditioner.
8177: One can specify in-place ILU(0) factorization by calling
8178: .vb
8179: PCSetType(pc, PCILU);
8180: PCFactorSetUseInPlace(pc, PETSC_TRUE);
8181: .ve
8182: or by using the options -pc_type ilu -pc_factor_in_place
8184: In-place factorization ILU(0) can also be used as a local
8185: solver for the blocks within the block Jacobi or additive Schwarz
8186: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8187: for details on setting local solver options.
8189: Most users should employ the `KSP` interface for linear solvers
8190: instead of working directly with matrix algebra routines such as this.
8191: See, e.g., `KSPCreate()`.
8193: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8194: @*/
8195: PetscErrorCode MatSetUnfactored(Mat mat)
8196: {
8197: PetscFunctionBegin;
8200: MatCheckPreallocated(mat, 1);
8201: mat->factortype = MAT_FACTOR_NONE;
8202: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8203: PetscUseTypeMethod(mat, setunfactored);
8204: PetscFunctionReturn(PETSC_SUCCESS);
8205: }
8207: /*MC
8208: MatDenseGetArrayF90 - Accesses a matrix array from Fortran
8210: Synopsis:
8211: MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8213: Not Collective
8215: Input Parameter:
8216: . x - matrix
8218: Output Parameters:
8219: + xx_v - the Fortran pointer to the array
8220: - ierr - error code
8222: Example of Usage:
8223: .vb
8224: PetscScalar, pointer :: xx_v(:,:)
8225: ....
8226: call MatDenseGetArrayF90(x,xx_v,ierr)
8227: a = xx_v(3,1)
8228: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8229: .ve
8231: Level: advanced
8233: .seealso: [](ch_matrices), `Mat`, `MatDenseRestoreArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJGetArrayF90()`
8234: M*/
8236: /*MC
8237: MatDenseRestoreArrayF90 - Restores a matrix array that has been
8238: accessed with `MatDenseGetArrayF90()`.
8240: Synopsis:
8241: MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8243: Not Collective
8245: Input Parameters:
8246: + x - matrix
8247: - xx_v - the Fortran90 pointer to the array
8249: Output Parameter:
8250: . ierr - error code
8252: Example of Usage:
8253: .vb
8254: PetscScalar, pointer :: xx_v(:,:)
8255: ....
8256: call MatDenseGetArrayF90(x,xx_v,ierr)
8257: a = xx_v(3,1)
8258: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8259: .ve
8261: Level: advanced
8263: .seealso: [](ch_matrices), `Mat`, `MatDenseGetArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJRestoreArrayF90()`
8264: M*/
8266: /*MC
8267: MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran.
8269: Synopsis:
8270: MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8272: Not Collective
8274: Input Parameter:
8275: . x - matrix
8277: Output Parameters:
8278: + xx_v - the Fortran pointer to the array
8279: - ierr - error code
8281: Example of Usage:
8282: .vb
8283: PetscScalar, pointer :: xx_v(:)
8284: ....
8285: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8286: a = xx_v(3)
8287: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8288: .ve
8290: Level: advanced
8292: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseGetArrayF90()`
8293: M*/
8295: /*MC
8296: MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
8297: accessed with `MatSeqAIJGetArrayF90()`.
8299: Synopsis:
8300: MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8302: Not Collective
8304: Input Parameters:
8305: + x - matrix
8306: - xx_v - the Fortran90 pointer to the array
8308: Output Parameter:
8309: . ierr - error code
8311: Example of Usage:
8312: .vb
8313: PetscScalar, pointer :: xx_v(:)
8314: ....
8315: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8316: a = xx_v(3)
8317: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8318: .ve
8320: Level: advanced
8322: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseRestoreArrayF90()`
8323: M*/
8325: /*@
8326: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8327: as the original matrix.
8329: Collective
8331: Input Parameters:
8332: + mat - the original matrix
8333: . isrow - parallel `IS` containing the rows this processor should obtain
8334: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" in the new matrix.
8335: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8337: Output Parameter:
8338: . newmat - the new submatrix, of the same type as the original matrix
8340: Level: advanced
8342: Notes:
8343: The submatrix will be able to be multiplied with vectors using the same layout as `iscol`.
8345: Some matrix types place restrictions on the row and column indices, such
8346: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8347: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8349: The index sets may not have duplicate entries.
8351: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`;
8352: the `MatCreateSubMatrix()` routine will create the newmat for you. Any additional calls
8353: to this routine with a mat of the same nonzero structure and with a cll of `MAT_REUSE_MATRIX`
8354: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8355: you are finished using it.
8357: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8358: the input matrix.
8360: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8362: Example usage:
8363: Consider the following 8x8 matrix with 34 non-zero values, that is
8364: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8365: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8366: as follows
8367: .vb
8368: 1 2 0 | 0 3 0 | 0 4
8369: Proc0 0 5 6 | 7 0 0 | 8 0
8370: 9 0 10 | 11 0 0 | 12 0
8371: -------------------------------------
8372: 13 0 14 | 15 16 17 | 0 0
8373: Proc1 0 18 0 | 19 20 21 | 0 0
8374: 0 0 0 | 22 23 0 | 24 0
8375: -------------------------------------
8376: Proc2 25 26 27 | 0 0 28 | 29 0
8377: 30 0 0 | 31 32 33 | 0 34
8378: .ve
8380: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8382: .vb
8383: 2 0 | 0 3 0 | 0
8384: Proc0 5 6 | 7 0 0 | 8
8385: -------------------------------
8386: Proc1 18 0 | 19 20 21 | 0
8387: -------------------------------
8388: Proc2 26 27 | 0 0 28 | 29
8389: 0 0 | 31 32 33 | 0
8390: .ve
8392: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8393: @*/
8394: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8395: {
8396: PetscMPIInt size;
8397: Mat *local;
8398: IS iscoltmp;
8399: PetscBool flg;
8401: PetscFunctionBegin;
8405: PetscAssertPointer(newmat, 5);
8408: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8409: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8411: MatCheckPreallocated(mat, 1);
8412: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8414: if (!iscol || isrow == iscol) {
8415: PetscBool stride;
8416: PetscMPIInt grabentirematrix = 0, grab;
8417: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8418: if (stride) {
8419: PetscInt first, step, n, rstart, rend;
8420: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8421: if (step == 1) {
8422: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8423: if (rstart == first) {
8424: PetscCall(ISGetLocalSize(isrow, &n));
8425: if (n == rend - rstart) grabentirematrix = 1;
8426: }
8427: }
8428: }
8429: PetscCall(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8430: if (grab) {
8431: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8432: if (cll == MAT_INITIAL_MATRIX) {
8433: *newmat = mat;
8434: PetscCall(PetscObjectReference((PetscObject)mat));
8435: }
8436: PetscFunctionReturn(PETSC_SUCCESS);
8437: }
8438: }
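  /* when iscol is NULL, substitute a stride IS covering this process's local columns */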
8440: if (!iscol) {
8441: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8442: } else {
8443: iscoltmp = iscol;
8444: }
8446: /* if original matrix is on just one processor then use submatrix generated */
8447: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8448: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8449: goto setproperties;
8450: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8451: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8452: *newmat = *local;
8453: PetscCall(PetscFree(local));
8454: goto setproperties;
8455: } else if (!mat->ops->createsubmatrix) {
8456: /* Create a new matrix type that implements the operation using the full matrix */
8457: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8458: switch (cll) {
8459: case MAT_INITIAL_MATRIX:
8460: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8461: break;
8462: case MAT_REUSE_MATRIX:
8463: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8464: break;
8465: default:
8466: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8467: }
8468: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8469: goto setproperties;
8470: }
8472: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8473: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8474: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8476: setproperties:
8477: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8478: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8479: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8480: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8481: PetscFunctionReturn(PETSC_SUCCESS);
8482: }
8484: /*@
8485: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8487: Not Collective
8489: Input Parameters:
8490: + A - the matrix we wish to propagate options from
8491: - B - the matrix we wish to propagate options to
8493: Level: beginner
8495: Note:
8496: Propagates the options associated with `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
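A minimal sketch of typical use (assuming `A` and `B` are two already created matrices where `B` is known to share the symmetry properties of `A`):
.vb
   MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE);
   MatSetOption(A, MAT_SYMMETRY_ETERNAL, PETSC_TRUE);
   MatPropagateSymmetryOptions(A, B);  /* B is now also marked symmetric */
.ve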
8498: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8499: @*/
8500: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8501: {
8502: PetscFunctionBegin;
8505: B->symmetry_eternal = A->symmetry_eternal;
8506: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8507: B->symmetric = A->symmetric;
8508: B->structurally_symmetric = A->structurally_symmetric;
8509: B->spd = A->spd;
8510: B->hermitian = A->hermitian;
8511: PetscFunctionReturn(PETSC_SUCCESS);
8512: }
8514: /*@
8515: MatStashSetInitialSize - sets the sizes of the matrix stash, which is
8516: used during the assembly process to store values that belong to
8517: other processors.
8519: Not Collective
8521: Input Parameters:
8522: + mat - the matrix
8523: . size - the initial size of the stash.
8524: - bsize - the initial size of the block-stash (if used).
8526: Options Database Keys:
8527: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8528: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8530: Level: intermediate
8532: Notes:
8533: The block-stash is used for values set with `MatSetValuesBlocked()` while
8534: the stash is used for values set with `MatSetValues()`
8536: Run with the option `-info` and look for output of the form
8537: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8538: to determine the appropriate value, MM, to use for `size`, and for output of the form
8539: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8540: to determine the value, BMM, to use for `bsize`.
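For example, if a previous run with `-info` reported a stash with roughly 10000 entries and several mallocs, one might preallocate before assembly (a sketch; the sizes here are illustrative only):
.vb
   MatStashSetInitialSize(mat, 10000, 500);
   /* ... MatSetValues() calls that generate off-process entries ... */
   MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
   MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);
.ve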
8542: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8543: @*/
8544: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8545: {
8546: PetscFunctionBegin;
8549: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8550: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8551: PetscFunctionReturn(PETSC_SUCCESS);
8552: }
8554: /*@
8555: MatInterpolateAdd - $w = y + A*x$ or $w = y + A^T*x$ depending on the shape of
8556: the matrix
8558: Neighbor-wise Collective
8560: Input Parameters:
8561: + A - the matrix
8562: . x - the vector to be multiplied by the interpolation operator
8563: - y - the vector to be added to the result
8565: Output Parameter:
8566: . w - the resulting vector
8568: Level: intermediate
8570: Notes:
8571: `w` may be the same vector as `y`.
8573: This allows one to use either the restriction or interpolation (its transpose)
8574: matrix to do the interpolation
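A minimal sketch (assuming `P` is an interpolation matrix from a coarse space to a fine space, `xc` is a coarse-level vector, and `yf`, `wf` are fine-level vectors of compatible layout):
.vb
   MatInterpolateAdd(P, xc, yf, wf);  /* wf = yf + P*xc, since the row size of P matches yf */
.ve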
8576: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8577: @*/
8578: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8579: {
8580: PetscInt M, N, Ny;
8582: PetscFunctionBegin;
8587: PetscCall(MatGetSize(A, &M, &N));
8588: PetscCall(VecGetSize(y, &Ny));
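  /* apply A when its row dimension matches y, otherwise apply its transpose */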
8589: if (M == Ny) {
8590: PetscCall(MatMultAdd(A, x, y, w));
8591: } else {
8592: PetscCall(MatMultTransposeAdd(A, x, y, w));
8593: }
8594: PetscFunctionReturn(PETSC_SUCCESS);
8595: }
8597: /*@
8598: MatInterpolate - $y = A*x$ or $y = A^T*x$ depending on the shape of
8599: the matrix
8601: Neighbor-wise Collective
8603: Input Parameters:
8604: + A - the matrix
8605: - x - the vector to be interpolated
8607: Output Parameter:
8608: . y - the resulting vector
8610: Level: intermediate
8612: Note:
8613: This allows one to use either the restriction or interpolation (its transpose)
8614: matrix to do the interpolation
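A minimal sketch (assuming `R` is a restriction matrix from a fine space to a coarse space, `xc` is a coarse-level vector, and `yf` is a fine-level vector):
.vb
   MatInterpolate(R, xc, yf);  /* yf = R^T*xc, since the row size of R does not match yf */
.ve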
8616: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8617: @*/
8618: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8619: {
8620: PetscInt M, N, Ny;
8622: PetscFunctionBegin;
8626: PetscCall(MatGetSize(A, &M, &N));
8627: PetscCall(VecGetSize(y, &Ny));
8628: if (M == Ny) {
8629: PetscCall(MatMult(A, x, y));
8630: } else {
8631: PetscCall(MatMultTranspose(A, x, y));
8632: }
8633:   PetscFunctionReturn(PETSC_SUCCESS);
8634: }