/* matrix.c */
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
/* Logging support */
/* Class identifiers for the object classes defined by the Mat package */
PetscClassId MAT_CLASSID;
PetscClassId MAT_COLORING_CLASSID;
PetscClassId MAT_FDCOLORING_CLASSID;
PetscClassId MAT_TRANSPOSECOLORING_CLASSID;

/* Profiling events for the basic matrix operations, solves, and factorizations */
PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
PetscLogEvent MAT_TransposeColoringCreate;
/* Events for matrix-matrix products and their symbolic/numeric phases */
PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
PetscLogEvent MAT_GetMultiProcBlock;
/* Events for device (CUDA/HIP/ViennaCL) back-end data movement and analysis */
PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
PetscLogEvent MAT_SetValuesBatch;
PetscLogEvent MAT_ViennaCLCopyToGPU;
PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;

/* Printable names for MatFactorType; the entry order must match the MatFactorType enum */
const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
/*@
  MatSetRandom - Sets all components of a matrix to random numbers.

  Logically Collective

  Input Parameters:
+ x    - the matrix
- rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL` and
         it will create one internally.

  Example:
.vb
  PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
  MatSetRandom(x,rctx);
  PetscRandomDestroy(&rctx);
.ve

  Level: intermediate

  Notes:
  For sparse matrices that have been preallocated but not been assembled, it randomly selects appropriate locations,

  for sparse matrices that already have nonzero locations, it fills the locations with random numbers.

  It generates an error if used on unassembled sparse matrices that have not been preallocated.

.seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
@*/
PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
{
  PetscRandom randObj = NULL; /* generator created here when the caller passes NULL; destroyed below */

  PetscFunctionBegin;
  MatCheckPreallocated(x, 1);
  if (!rctx) {
    /* no generator supplied: create one on the matrix's communicator with the matrix's default type */
    MPI_Comm comm;
    PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
    PetscCall(PetscRandomCreate(comm, &randObj));
    PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
    PetscCall(PetscRandomSetFromOptions(randObj));
    rctx = randObj;
  }
  PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
  PetscUseTypeMethod(x, setrandom, rctx);
  PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));

  /* type-specific setrandom may insert values; finish the assembly so the result is usable */
  PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscRandomDestroy(&randObj)); /* no-op when the caller supplied rctx (randObj stayed NULL) */
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in

  Logically Collective

  Input Parameter:
. mat - the factored matrix

  Output Parameters:
+ pivot - the pivot value computed
- row   - the row that the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
          share the matrix

  Level: advanced

  Notes:
  This routine does not work for factorizations done with external packages.

  This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`

  This can also be called on non-factored matrices that come from, for example, matrices used in SOR.

.seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
          `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
          `MAT_FACTOR_NUMERIC_ZEROPIVOT`
@*/
PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
{
  PetscFunctionBegin;
  PetscAssertPointer(pivot, 2);
  PetscAssertPointer(row, 3);
  /* values were recorded on the Mat by the numeric factorization when the zero pivot was detected */
  *pivot = mat->factorerror_zeropivot_value;
  *row   = mat->factorerror_zeropivot_row;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorGetError - gets the error code from a factorization

  Logically Collective

  Input Parameter:
. mat - the factored matrix

  Output Parameter:
. err - the error code

  Level: advanced

  Note:
  This can also be called on non-factored matrices that come from, for example, matrices used in SOR.

.seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
          `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
@*/
PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
{
  PetscFunctionBegin;
  PetscAssertPointer(err, 2);
  /* simply report the error type stored on the matrix by the factorization routines */
  *err = mat->factorerrortype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
171: /*@
172: MatFactorClearError - clears the error code in a factorization
174: Logically Collective
176: Input Parameter:
177: . mat - the factored matrix
179: Level: developer
181: Note:
182: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
184: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
185: `MatGetErrorCode()`, `MatFactorError`
186: @*/
187: PetscErrorCode MatFactorClearError(Mat mat)
188: {
189: PetscFunctionBegin;
191: mat->factorerrortype = MAT_FACTOR_NOERROR;
192: mat->factorerror_zeropivot_value = 0.0;
193: mat->factorerror_zeropivot_row = 0;
194: PetscFunctionReturn(PETSC_SUCCESS);
195: }
197: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
198: {
199: Vec r, l;
200: const PetscScalar *al;
201: PetscInt i, nz, gnz, N, n, st;
203: PetscFunctionBegin;
204: PetscCall(MatCreateVecs(mat, &r, &l));
205: if (!cols) { /* nonzero rows */
206: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
207: PetscCall(MatGetSize(mat, &N, NULL));
208: PetscCall(MatGetLocalSize(mat, &n, NULL));
209: PetscCall(VecSet(l, 0.0));
210: PetscCall(VecSetRandom(r, NULL));
211: PetscCall(MatMult(mat, r, l));
212: PetscCall(VecGetArrayRead(l, &al));
213: } else { /* nonzero columns */
214: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
215: PetscCall(MatGetSize(mat, NULL, &N));
216: PetscCall(MatGetLocalSize(mat, NULL, &n));
217: PetscCall(VecSet(r, 0.0));
218: PetscCall(VecSetRandom(l, NULL));
219: PetscCall(MatMultTranspose(mat, l, r));
220: PetscCall(VecGetArrayRead(r, &al));
221: }
222: if (tol <= 0.0) {
223: for (i = 0, nz = 0; i < n; i++)
224: if (al[i] != 0.0) nz++;
225: } else {
226: for (i = 0, nz = 0; i < n; i++)
227: if (PetscAbsScalar(al[i]) > tol) nz++;
228: }
229: PetscCall(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
230: if (gnz != N) {
231: PetscInt *nzr;
232: PetscCall(PetscMalloc1(nz, &nzr));
233: if (nz) {
234: if (tol < 0) {
235: for (i = 0, nz = 0; i < n; i++)
236: if (al[i] != 0.0) nzr[nz++] = i + st;
237: } else {
238: for (i = 0, nz = 0; i < n; i++)
239: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
240: }
241: }
242: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
243: } else *nonzero = NULL;
244: if (!cols) { /* nonzero rows */
245: PetscCall(VecRestoreArrayRead(l, &al));
246: } else {
247: PetscCall(VecRestoreArrayRead(r, &al));
248: }
249: PetscCall(VecDestroy(&l));
250: PetscCall(VecDestroy(&r));
251: PetscFunctionReturn(PETSC_SUCCESS);
252: }
/*@
  MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix

  Input Parameter:
. mat - the matrix

  Output Parameter:
. keptrows - the rows that are not completely zero

  Level: intermediate

  Note:
  `keptrows` is set to `NULL` if all rows are nonzero.

  Developer Note:
  If `keptrows` is not `NULL`, it must be sorted.

.seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
@*/
PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
{
  PetscFunctionBegin;
  PetscAssertPointer(keptrows, 2);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* prefer a type-specific implementation; otherwise fall back to the random-vector probe */
  if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
  else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
  /* record the sortedness promised by the implementations (see Developer Note) */
  if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
287: /*@
288: MatFindZeroRows - Locate all rows that are completely zero in the matrix
290: Input Parameter:
291: . mat - the matrix
293: Output Parameter:
294: . zerorows - the rows that are completely zero
296: Level: intermediate
298: Note:
299: `zerorows` is set to `NULL` if no rows are zero.
301: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
302: @*/
303: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
304: {
305: IS keptrows;
306: PetscInt m, n;
308: PetscFunctionBegin;
311: PetscAssertPointer(zerorows, 2);
312: PetscCall(MatFindNonzeroRows(mat, &keptrows));
313: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
314: In keeping with this convention, we set zerorows to NULL if there are no zero
315: rows. */
316: if (keptrows == NULL) {
317: *zerorows = NULL;
318: } else {
319: PetscCall(MatGetOwnershipRange(mat, &m, &n));
320: PetscCall(ISComplement(keptrows, m, n, zerorows));
321: PetscCall(ISDestroy(&keptrows));
322: }
323: PetscFunctionReturn(PETSC_SUCCESS);
324: }
326: /*@
327: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
329: Not Collective
331: Input Parameter:
332: . A - the matrix
334: Output Parameter:
335: . a - the diagonal part (which is a SEQUENTIAL matrix)
337: Level: advanced
339: Notes:
340: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
342: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
344: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
345: @*/
346: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
347: {
348: PetscFunctionBegin;
351: PetscAssertPointer(a, 2);
352: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
353: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
354: else {
355: PetscMPIInt size;
357: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
358: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
359: *a = A;
360: }
361: PetscFunctionReturn(PETSC_SUCCESS);
362: }
364: /*@
365: MatGetTrace - Gets the trace of a matrix. The sum of the diagonal entries.
367: Collective
369: Input Parameter:
370: . mat - the matrix
372: Output Parameter:
373: . trace - the sum of the diagonal entries
375: Level: advanced
377: .seealso: [](ch_matrices), `Mat`
378: @*/
379: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
380: {
381: Vec diag;
383: PetscFunctionBegin;
385: PetscAssertPointer(trace, 2);
386: PetscCall(MatCreateVecs(mat, &diag, NULL));
387: PetscCall(MatGetDiagonal(mat, diag));
388: PetscCall(VecSum(diag, trace));
389: PetscCall(VecDestroy(&diag));
390: PetscFunctionReturn(PETSC_SUCCESS);
391: }
/*@
  MatRealPart - Zeros out the imaginary part of the matrix

  Logically Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
@*/
PetscErrorCode MatRealPart(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* the operation is type-specific; error if the matrix type does not provide it */
  PetscUseTypeMethod(mat, realpart);
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix

  Collective

  Input Parameter:
. mat - the matrix

  Output Parameters:
+ nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
- ghosts  - the global indices of the ghost points

  Level: advanced

  Note:
  `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`

.seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
@*/
PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
  else {
    /* matrix types without ghost support report an empty ghost set rather than erroring */
    if (nghosts) *nghosts = 0;
    if (ghosts) *ghosts = NULL;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part

  Logically Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatRealPart()`
@*/
PetscErrorCode MatImaginaryPart(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* the operation is type-specific; error if the matrix type does not provide it */
  PetscUseTypeMethod(mat, imaginarypart);
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure

  Not Collective

  Input Parameter:
. mat - the matrix

  Output Parameters:
+ missing - is any diagonal entry missing
- dd      - first diagonal entry that is missing (optional) on this process

  Level: advanced

  Note:
  This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value

.seealso: [](ch_matrices), `Mat`
@*/
PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
{
  PetscFunctionBegin;
  PetscAssertPointer(missing, 2);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* structural query is type-specific; errors if the matrix type does not provide it */
  PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
  PetscFunctionReturn(PETSC_SUCCESS);
}
// PetscClangLinter pragma disable: -fdoc-section-header-unknown
/*@C
  MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
  for each row that you get to ensure that your application does
  not bleed memory.

  Not Collective

  Input Parameters:
+ mat - the matrix
- row - the row to get

  Output Parameters:
+ ncols - if not `NULL`, the number of nonzeros in `row`
. cols  - if not `NULL`, the column numbers
- vals  - if not `NULL`, the numerical values

  Level: advanced

  Notes:
  This routine is provided for people who need to have direct access
  to the structure of a matrix. We hope that we provide enough
  high-level matrix routines that few users will need it.

  `MatGetRow()` always returns 0-based column indices, regardless of
  whether the internal representation is 0-based (default) or 1-based.

  For better efficiency, set `cols` and/or `vals` to `NULL` if you do
  not wish to extract these quantities.

  The user can only examine the values extracted with `MatGetRow()`;
  the values CANNOT be altered. To change the matrix entries, one
  must use `MatSetValues()`.

  You can only have one call to `MatGetRow()` outstanding for a particular
  matrix at a time, per processor. `MatGetRow()` can only obtain rows
  associated with the given processor, it cannot get rows from the
  other processors; for that we suggest using `MatCreateSubMatrices()`, then
  `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
  is in the global number of rows.

  Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.

  Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.

  Fortran Note:
  The calling sequence is
.vb
  MatGetRow(matrix,row,ncols,cols,values,ierr)
  Mat     matrix (input)
  PetscInt row    (input)
  PetscInt ncols  (output)
  PetscInt cols(maxcols) (output)
  PetscScalar values(maxcols) output
.ve
  where maxcols >= maximum nonzeros in any row of the matrix.

.seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
@*/
PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
{
  PetscInt incols; /* column count reported by the type-specific getrow */

  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* only locally owned rows can be accessed (see Notes above) */
  PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
  PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
  /* the const casts are safe: the public interface promises callers read-only access, while the
     internal getrow implementations are declared with non-const pointers */
  PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
  if (ncols) *ncols = incols;
  PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatConjugate - replaces the matrix values with their complex conjugates

  Logically Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
@*/
PetscErrorCode MatConjugate(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  /* conjugation is a no-op for real builds; it is also skipped when the matrix is flagged
     Hermitian (presumably because the stored data then needs no change -- NOTE(review): confirm) */
  if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
    PetscUseTypeMethod(mat, conjugate);
    PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.

  Not Collective

  Input Parameters:
+ mat   - the matrix
. row   - the row to get
. ncols - the number of nonzeros
. cols  - the columns of the nonzeros
- vals  - if nonzero the column values

  Level: advanced

  Notes:
  This routine should be called after you have finished examining the entries.

  This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
  use of the array after it has been restored. If you pass `NULL`, it will
  not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.

  Fortran Note:
  `MatRestoreRow()` MUST be called after `MatGetRow()`
  before another call to `MatGetRow()` can be made.

.seealso: [](ch_matrices), `Mat`, `MatGetRow()`
@*/
PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
{
  PetscFunctionBegin;
  if (ncols) PetscAssertPointer(ncols, 3);
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  /* types that allocate nothing in getrow may not provide restorerow; nothing to do then */
  if (!mat->ops->restorerow) PetscFunctionReturn(PETSC_SUCCESS);
  PetscUseTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
  /* scrub the caller's pointers so stale data cannot be used accidentally (see Notes) */
  if (ncols) *ncols = 0;
  if (cols) *cols = NULL;
  if (vals) *vals = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
  You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.

  Not Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

  Note:
  The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.

.seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
@*/
PetscErrorCode MatGetRowUpperTriangular(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* silently succeed for matrix types where the flag is irrelevant (non-SBAIJ) */
  if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
  PetscUseTypeMethod(mat, getrowuppertriangular);
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatRestoreRowUpperTriangular - Disable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.

  Not Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

  Note:
  This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.

.seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
@*/
PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* silently succeed for matrix types where the flag is irrelevant (non-SBAIJ) */
  if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
  PetscUseTypeMethod(mat, restorerowuppertriangular);
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatSetOptionsPrefix - Sets the prefix used for searching for all
  `Mat` options in the database.

  Logically Collective

  Input Parameters:
+ A      - the matrix
- prefix - the prefix to prepend to all option names

  Level: advanced

  Notes:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the hyphen.

  This is NOT used for options for the factorization of the matrix. Normally the
  prefix is automatically passed in from the PC calling the factorization. To set
  it directly use `MatSetOptionsPrefixFactor()`

.seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
@*/
PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
{
  PetscFunctionBegin;
  /* thin wrapper: the prefix is stored on the underlying PetscObject */
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database
  for matrices created with `MatGetFactor()`

  Logically Collective

  Input Parameters:
+ A      - the matrix
- prefix - the prefix to prepend to all option names for the factored matrix

  Level: developer

  Notes:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the hyphen.

  Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
  it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`

.seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
@*/
PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
{
  PetscFunctionBegin;
  if (prefix) {
    PetscAssertPointer(prefix, 2);
    PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
    /* guard against self-assignment before freeing the old prefix string */
    if (prefix != A->factorprefix) {
      PetscCall(PetscFree(A->factorprefix));
      PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
    }
  } else PetscCall(PetscFree(A->factorprefix)); /* NULL clears any previously set prefix */
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database
  for matrices created with `MatGetFactor()`

  Logically Collective

  Input Parameters:
+ A      - the matrix
- prefix - the prefix to prepend to all option names for the factored matrix

  Level: developer

  Notes:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the hyphen.

  Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
  it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`

.seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
          `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
          `MatSetOptionsPrefix()`
@*/
PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
{
  size_t len1, len2, new_len;

  PetscFunctionBegin;
  if (!prefix) PetscFunctionReturn(PETSC_SUCCESS); /* nothing to append */
  if (!A->factorprefix) {
    /* no existing prefix: appending is just setting */
    PetscCall(MatSetOptionsPrefixFactor(A, prefix));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");

  /* grow the existing string and copy the new prefix (plus its NUL terminator) after it */
  PetscCall(PetscStrlen(A->factorprefix, &len1));
  PetscCall(PetscStrlen(prefix, &len2));
  new_len = len1 + len2 + 1; /* +1 for the terminating NUL */
  PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
  PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatAppendOptionsPrefix - Appends to the prefix used for searching for all
  matrix options in the database.

  Logically Collective

  Input Parameters:
+ A      - the matrix
- prefix - the prefix to prepend to all option names

  Level: advanced

  Note:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the hyphen.

.seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
@*/
PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
{
  PetscFunctionBegin;
  /* thin wrapper: the prefix lives on the underlying PetscObject */
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetOptionsPrefix - Gets the prefix used for searching for all
  matrix options in the database.

  Not Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. prefix - pointer to the prefix string used

  Level: advanced

  Fortran Note:
  The user should pass in a string `prefix` of
  sufficient length to hold the prefix.

.seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
@*/
PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
{
  PetscFunctionBegin;
  PetscAssertPointer(prefix, 2);
  /* thin wrapper: returns a pointer to the object's stored prefix, not a copy */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`

  Not Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. state - the object state

  Level: advanced

  Note:
  Object state is an integer which gets increased every time
  the object is changed. By saving and later querying the object state
  one can determine whether information about the object is still current.

  See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
@*/
PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
{
  PetscFunctionBegin;
  PetscAssertPointer(state, 2);
  /* thin wrapper over the generic PetscObject state query */
  PetscCall(PetscObjectStateGet((PetscObject)A, state));
  PetscFunctionReturn(PETSC_SUCCESS);
}
901: /*@
902: MatResetPreallocation - Reset matrix to use the original nonzero pattern provided by the user.
904: Collective
906: Input Parameter:
907: . A - the matrix
909: Level: beginner
911: Notes:
912: The allocated memory will be shrunk after calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
914: Users can reset the preallocation to access the original memory.
916: Currently only supported for `MATAIJ` matrices.
918: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
919: @*/
920: PetscErrorCode MatResetPreallocation(Mat A)
921: {
922: PetscFunctionBegin;
925: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset preallocation after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
926: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
927: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
928: PetscFunctionReturn(PETSC_SUCCESS);
929: }
931: /*@
932: MatSetUp - Sets up the internal matrix data structures for later use.
934: Collective
936: Input Parameter:
937: . A - the matrix
939: Level: intermediate
941: Notes:
942: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
943: setting values in the matrix.
945: This routine is called internally by other matrix functions when needed so rarely needs to be called by users
947: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
948: @*/
949: PetscErrorCode MatSetUp(Mat A)
950: {
951: PetscFunctionBegin;
953: if (!((PetscObject)A)->type_name) {
954: PetscMPIInt size;
956: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
957: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
958: }
959: if (!A->preallocated) PetscTryTypeMethod(A, setup);
960: PetscCall(PetscLayoutSetUp(A->rmap));
961: PetscCall(PetscLayoutSetUp(A->cmap));
962: A->preallocated = PETSC_TRUE;
963: PetscFunctionReturn(PETSC_SUCCESS);
964: }
966: #if defined(PETSC_HAVE_SAWS)
967: #include <petscviewersaws.h>
968: #endif
/*
   If threadsafety is on, extraneous matrices may be printed

   This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
*/
#if !defined(PETSC_HAVE_THREADSAFETY)
/* nesting depth of MatView(); MatViewFromOptions() is a no-op while this is nonzero */
static PetscInt insidematview = 0;
#endif
979: /*@
980: MatViewFromOptions - View properties of the matrix based on options set in the options database
982: Collective
984: Input Parameters:
985: + A - the matrix
986: . obj - optional additional object that provides the options prefix to use
987: - name - command line option
989: Options Database Key:
990: . -mat_view [viewertype]:... - the viewer and its options
992: Level: intermediate
994: Note:
995: .vb
996: If no value is provided ascii:stdout is used
997: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
998: for example ascii::ascii_info prints just the information about the object not all details
999: unless :append is given filename opens in write mode, overwriting what was already there
1000: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
1001: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
1002: socket[:port] defaults to the standard output port
1003: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
1004: .ve
1006: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1007: @*/
1008: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
1009: {
1010: PetscFunctionBegin;
1012: #if !defined(PETSC_HAVE_THREADSAFETY)
1013: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
1014: #endif
1015: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
1016: PetscFunctionReturn(PETSC_SUCCESS);
1017: }
1019: /*@
1020: MatView - display information about a matrix in a variety ways
1022: Collective on viewer
1024: Input Parameters:
1025: + mat - the matrix
1026: - viewer - visualization context
1028: Options Database Keys:
1029: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1030: . -mat_view ::ascii_info_detail - Prints more detailed info
1031: . -mat_view - Prints matrix in ASCII format
1032: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1033: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1034: . -display <name> - Sets display name (default is host)
1035: . -draw_pause <sec> - Sets number of seconds to pause after display
1036: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1037: . -viewer_socket_machine <machine> - -
1038: . -viewer_socket_port <port> - -
1039: . -mat_view binary - save matrix to file in binary format
1040: - -viewer_binary_filename <name> - -
1042: Level: beginner
1044: Notes:
1045: The available visualization contexts include
1046: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1047: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1048: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1049: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1051: The user can open alternative visualization contexts with
1052: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1053: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a
1054: specified file; corresponding input uses `MatLoad()`
1055: . `PetscViewerDrawOpen()` - Outputs nonzero matrix structure to
1056: an X window display
1057: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer.
1058: Currently only the `MATSEQDENSE` and `MATAIJ`
1059: matrix types support the Socket viewer.
1061: The user can call `PetscViewerPushFormat()` to specify the output
1062: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1063: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1064: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1065: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1066: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1067: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse
1068: format common among all matrix types
1069: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific
1070: format (which is in many cases the same as the default)
1071: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix
1072: size and structure (not the matrix entries)
1073: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about
1074: the matrix structure
1076: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes,
1077: the program will seemingly hang and take hours for larger matrices, for larger matrices one should use the binary format.
1079: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1081: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1082: viewer is used.
1084: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1085: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1087: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1088: and then use the following mouse functions.
1089: .vb
1090: left mouse: zoom in
1091: middle mouse: zoom out
1092: right mouse: continue with the simulation
1093: .ve
1095: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1096: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1097: @*/
1098: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1099: {
1100: PetscInt rows, cols, rbs, cbs;
1101: PetscBool isascii, isstring, issaws;
1102: PetscViewerFormat format;
1103: PetscMPIInt size;
1105: PetscFunctionBegin;
1108: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1111: PetscCall(PetscViewerGetFormat(viewer, &format));
1112: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1113: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1115: #if !defined(PETSC_HAVE_THREADSAFETY)
1116: insidematview++;
1117: #endif
1118: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1119: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1120: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1121: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1123: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1124: if (isascii) {
1125: if (!mat->preallocated) {
1126: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1127: #if !defined(PETSC_HAVE_THREADSAFETY)
1128: insidematview--;
1129: #endif
1130: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1131: PetscFunctionReturn(PETSC_SUCCESS);
1132: }
1133: if (!mat->assembled) {
1134: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1135: #if !defined(PETSC_HAVE_THREADSAFETY)
1136: insidematview--;
1137: #endif
1138: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1139: PetscFunctionReturn(PETSC_SUCCESS);
1140: }
1141: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1142: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1143: MatNullSpace nullsp, transnullsp;
1145: PetscCall(PetscViewerASCIIPushTab(viewer));
1146: PetscCall(MatGetSize(mat, &rows, &cols));
1147: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1148: if (rbs != 1 || cbs != 1) {
1149: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1150: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1151: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1152: if (mat->factortype) {
1153: MatSolverType solver;
1154: PetscCall(MatFactorGetSolverType(mat, &solver));
1155: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1156: }
1157: if (mat->ops->getinfo) {
1158: MatInfo info;
1159: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1160: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1161: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1162: }
1163: PetscCall(MatGetNullSpace(mat, &nullsp));
1164: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1165: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1166: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1167: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1168: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1169: PetscCall(PetscViewerASCIIPushTab(viewer));
1170: PetscCall(MatProductView(mat, viewer));
1171: PetscCall(PetscViewerASCIIPopTab(viewer));
1172: if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1173: IS tmp;
1175: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
1176: PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1177: PetscCall(PetscViewerASCIIPushTab(viewer));
1178: PetscCall(ISView(tmp, viewer));
1179: PetscCall(PetscViewerASCIIPopTab(viewer));
1180: PetscCall(ISDestroy(&tmp));
1181: }
1182: }
1183: } else if (issaws) {
1184: #if defined(PETSC_HAVE_SAWS)
1185: PetscMPIInt rank;
1187: PetscCall(PetscObjectName((PetscObject)mat));
1188: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1189: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1190: #endif
1191: } else if (isstring) {
1192: const char *type;
1193: PetscCall(MatGetType(mat, &type));
1194: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1195: PetscTryTypeMethod(mat, view, viewer);
1196: }
1197: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1198: PetscCall(PetscViewerASCIIPushTab(viewer));
1199: PetscUseTypeMethod(mat, viewnative, viewer);
1200: PetscCall(PetscViewerASCIIPopTab(viewer));
1201: } else if (mat->ops->view) {
1202: PetscCall(PetscViewerASCIIPushTab(viewer));
1203: PetscUseTypeMethod(mat, view, viewer);
1204: PetscCall(PetscViewerASCIIPopTab(viewer));
1205: }
1206: if (isascii) {
1207: PetscCall(PetscViewerGetFormat(viewer, &format));
1208: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1209: }
1210: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1211: #if !defined(PETSC_HAVE_THREADSAFETY)
1212: insidematview--;
1213: #endif
1214: PetscFunctionReturn(PETSC_SUCCESS);
1215: }
#if defined(PETSC_USE_DEBUG)
#include <../src/sys/totalview/tv_data_display.h>
/* TotalView debugger hook: describes how a Mat should be rendered in the debugger's data pane
   (local/global dimensions and the type name); never called by PETSc itself */
PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
{
  TV_add_row("Local rows", "int", &mat->rmap->n);
  TV_add_row("Local columns", "int", &mat->cmap->n);
  TV_add_row("Global rows", "int", &mat->rmap->N);
  TV_add_row("Global columns", "int", &mat->cmap->N);
  TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
  return TV_format_OK;
}
#endif
1230: /*@
1231: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1232: with `MatView()`. The matrix format is determined from the options database.
1233: Generates a parallel MPI matrix if the communicator has more than one
1234: processor. The default matrix type is `MATAIJ`.
1236: Collective
1238: Input Parameters:
1239: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1240: or some related function before a call to `MatLoad()`
1241: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1243: Options Database Key:
1244: . -matload_block_size <bs> - set block size
1246: Level: beginner
1248: Notes:
1249: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1250: `Mat` before calling this routine if you wish to set it from the options database.
1252: `MatLoad()` automatically loads into the options database any options
1253: given in the file filename.info where filename is the name of the file
1254: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1255: file will be ignored if you use the -viewer_binary_skip_info option.
1257: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1258: sets the default matrix type AIJ and sets the local and global sizes.
1259: If type and/or size is already set, then the same are used.
1261: In parallel, each processor can load a subset of rows (or the
1262: entire matrix). This routine is especially useful when a large
1263: matrix is stored on disk and only part of it is desired on each
1264: processor. For example, a parallel solver may access only some of
1265: the rows from each processor. The algorithm used here reads
1266: relatively small blocks of data rather than reading the entire
1267: matrix and then subsetting it.
1269: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1270: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1271: or the sequence like
1272: .vb
1273: `PetscViewer` v;
1274: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1275: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1276: `PetscViewerSetFromOptions`(v);
1277: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1278: `PetscViewerFileSetName`(v,"datafile");
1279: .ve
1280: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1281: $ -viewer_type {binary, hdf5}
1283: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1284: and src/mat/tutorials/ex10.c with the second approach.
1286: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1287: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1288: Multiple objects, both matrices and vectors, can be stored within the same file.
1289: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1291: Most users should not need to know the details of the binary storage
1292: format, since `MatLoad()` and `MatView()` completely hide these details.
1293: But for anyone who is interested, the standard binary matrix storage
1294: format is
1296: .vb
1297: PetscInt MAT_FILE_CLASSID
1298: PetscInt number of rows
1299: PetscInt number of columns
1300: PetscInt total number of nonzeros
1301: PetscInt *number nonzeros in each row
1302: PetscInt *column indices of all nonzeros (starting index is zero)
1303: PetscScalar *values of all nonzeros
1304: .ve
1305: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1306: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1307: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1309: PETSc automatically does the byte swapping for
1310: machines that store the bytes reversed. Thus if you write your own binary
1311: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1312: and `PetscBinaryWrite()` to see how this may be done.
1314: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1315: Each processor's chunk is loaded independently by its owning MPI process.
1316: Multiple objects, both matrices and vectors, can be stored within the same file.
1317: They are looked up by their PetscObject name.
1319: As the MATLAB MAT-File Version 7.3 format is also a HDF5 flavor, we decided to use
1320: by default the same structure and naming of the AIJ arrays and column count
1321: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1322: $ save example.mat A b -v7.3
1323: can be directly read by this routine (see Reference 1 for details).
1325: Depending on your MATLAB version, this format might be a default,
1326: otherwise you can set it as default in Preferences.
1328: Unless -nocompression flag is used to save the file in MATLAB,
1329: PETSc must be configured with ZLIB package.
1331: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1333: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1335: Corresponding `MatView()` is not yet implemented.
1337: The loaded matrix is actually a transpose of the original one in MATLAB,
1338: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1339: With this format, matrix is automatically transposed by PETSc,
1340: unless the matrix is marked as SPD or symmetric
1341: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1343: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1345: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1346: @*/
1347: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1348: {
1349: PetscBool flg;
1351: PetscFunctionBegin;
1355: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1357: flg = PETSC_FALSE;
1358: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1359: if (flg) {
1360: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1361: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1362: }
1363: flg = PETSC_FALSE;
1364: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1365: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1367: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1368: PetscUseTypeMethod(mat, load, viewer);
1369: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1370: PetscFunctionReturn(PETSC_SUCCESS);
1371: }
1373: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1374: {
1375: Mat_Redundant *redund = *redundant;
1377: PetscFunctionBegin;
1378: if (redund) {
1379: if (redund->matseq) { /* via MatCreateSubMatrices() */
1380: PetscCall(ISDestroy(&redund->isrow));
1381: PetscCall(ISDestroy(&redund->iscol));
1382: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1383: } else {
1384: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1385: PetscCall(PetscFree(redund->sbuf_j));
1386: PetscCall(PetscFree(redund->sbuf_a));
1387: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1388: PetscCall(PetscFree(redund->rbuf_j[i]));
1389: PetscCall(PetscFree(redund->rbuf_a[i]));
1390: }
1391: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1392: }
1394: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1395: PetscCall(PetscFree(redund));
1396: }
1397: PetscFunctionReturn(PETSC_SUCCESS);
1398: }
1400: /*@
1401: MatDestroy - Frees space taken by a matrix.
1403: Collective
1405: Input Parameter:
1406: . A - the matrix
1408: Level: beginner
1410: Developer Note:
1411: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1412: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1413: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1414: if changes are needed here.
1416: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1417: @*/
1418: PetscErrorCode MatDestroy(Mat *A)
1419: {
1420: PetscFunctionBegin;
1421: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1423: if (--((PetscObject)*A)->refct > 0) {
1424: *A = NULL;
1425: PetscFunctionReturn(PETSC_SUCCESS);
1426: }
1428: /* if memory was published with SAWs then destroy it */
1429: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1430: PetscTryTypeMethod(*A, destroy);
1432: PetscCall(PetscFree((*A)->factorprefix));
1433: PetscCall(PetscFree((*A)->defaultvectype));
1434: PetscCall(PetscFree((*A)->defaultrandtype));
1435: PetscCall(PetscFree((*A)->bsizes));
1436: PetscCall(PetscFree((*A)->solvertype));
1437: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1438: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1439: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1440: PetscCall(MatProductClear(*A));
1441: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1442: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1443: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1444: PetscCall(MatDestroy(&(*A)->schur));
1445: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1446: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1447: PetscCall(PetscHeaderDestroy(A));
1448: PetscFunctionReturn(PETSC_SUCCESS);
1449: }
1451: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1452: /*@
1453: MatSetValues - Inserts or adds a block of values into a matrix.
1454: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1455: MUST be called after all calls to `MatSetValues()` have been completed.
1457: Not Collective
1459: Input Parameters:
1460: + mat - the matrix
1461: . v - a logically two-dimensional array of values
1462: . m - the number of rows
1463: . idxm - the global indices of the rows
1464: . n - the number of columns
1465: . idxn - the global indices of the columns
1466: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1468: Level: beginner
1470: Notes:
1471: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1473: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1474: options cannot be mixed without intervening calls to the assembly
1475: routines.
1477: `MatSetValues()` uses 0-based row and column numbers in Fortran
1478: as well as in C.
1480: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1481: simply ignored. This allows easily inserting element stiffness matrices
1482: with homogeneous Dirichlet boundary conditions that you don't want represented
1483: in the matrix.
1485: Efficiency Alert:
1486: The routine `MatSetValuesBlocked()` may offer much better efficiency
1487: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1489: Fortran Notes:
1490: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1491: .vb
1492: MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES)
1493: .ve
1495: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1497: Developer Note:
1498: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1499: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1501: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1502: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1503: @*/
1504: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1505: {
1506: PetscFunctionBeginHot;
1509: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1510: PetscAssertPointer(idxm, 3);
1511: PetscAssertPointer(idxn, 5);
1512: MatCheckPreallocated(mat, 1);
1514: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1515: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1517: if (PetscDefined(USE_DEBUG)) {
1518: PetscInt i, j;
1520: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1521: if (v) {
1522: for (i = 0; i < m; i++) {
1523: for (j = 0; j < n; j++) {
1524: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1525: #if defined(PETSC_USE_COMPLEX)
1526: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1527: #else
1528: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1529: #endif
1530: }
1531: }
1532: }
1533: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1534: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1535: }
1537: if (mat->assembled) {
1538: mat->was_assembled = PETSC_TRUE;
1539: mat->assembled = PETSC_FALSE;
1540: }
1541: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1542: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1543: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1544: PetscFunctionReturn(PETSC_SUCCESS);
1545: }
1547: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1548: /*@
1549: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns
1550: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1551: MUST be called after all calls to `MatSetValues()` have been completed.
1553: Not Collective
1555: Input Parameters:
1556: + mat - the matrix
1557: . v - a logically two-dimensional array of values
1558: . ism - the rows to provide
1559: . isn - the columns to provide
1560: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1562: Level: beginner
1564: Notes:
1565: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1567: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1568: options cannot be mixed without intervening calls to the assembly
1569: routines.
1571: `MatSetValues()` uses 0-based row and column numbers in Fortran
1572: as well as in C.
1574: Negative indices may be passed in `ism` and `isn`, these rows and columns are
1575: simply ignored. This allows easily inserting element stiffness matrices
1576: with homogeneous Dirichlet boundary conditions that you don't want represented
1577: in the matrix.
1579: Efficiency Alert:
1580: The routine `MatSetValuesBlocked()` may offer much better efficiency
1581: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1583: This is currently not optimized for any particular `ISType`
1585: Developer Note:
1586: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1587: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1589: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1590: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1591: @*/
1592: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1593: {
1594: PetscInt m, n;
1595: const PetscInt *rows, *cols;
1597: PetscFunctionBeginHot;
1599: PetscCall(ISGetIndices(ism, &rows));
1600: PetscCall(ISGetIndices(isn, &cols));
1601: PetscCall(ISGetLocalSize(ism, &m));
1602: PetscCall(ISGetLocalSize(isn, &n));
1603: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1604: PetscCall(ISRestoreIndices(ism, &rows));
1605: PetscCall(ISRestoreIndices(isn, &cols));
1606: PetscFunctionReturn(PETSC_SUCCESS);
1607: }
1609: /*@
1610: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1611: values into a matrix
1613: Not Collective
1615: Input Parameters:
1616: + mat - the matrix
1617: . row - the (block) row to set
1618: - v - a logically two-dimensional array of values
1620: Level: intermediate
1622: Notes:
1623: The values, `v`, are column-oriented (for the block version) and sorted
1625: All the nonzero values in `row` must be provided
1627: The matrix must have previously had its column indices set, likely by having been assembled.
1629: `row` must belong to this MPI process
1631: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1632: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1633: @*/
1634: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1635: {
1636: PetscInt globalrow;
1638: PetscFunctionBegin;
1641: PetscAssertPointer(v, 3);
1642: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1643: PetscCall(MatSetValuesRow(mat, globalrow, v));
1644: PetscFunctionReturn(PETSC_SUCCESS);
1645: }
1647: /*@
1648: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1649: values into a matrix
1651: Not Collective
1653: Input Parameters:
1654: + mat - the matrix
1655: . row - the (block) row to set
1656: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1658: Level: advanced
1660: Notes:
1661: The values, `v`, are column-oriented for the block version.
1663: All the nonzeros in `row` must be provided
1665: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED, usually `MatSetValues()` is used.
1667: `row` must belong to this process
1669: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1670: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1671: @*/
PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
{
  PetscFunctionBeginHot;
  MatCheckPreallocated(mat, 1);
  PetscAssertPointer(v, 3);
  /* This routine always inserts, so it cannot be interleaved with ADD_VALUES
     calls without an intervening assembly */
  PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  mat->insertmode = INSERT_VALUES;

  if (mat->assembled) {
    /* New values invalidate the assembled state; remember it was assembled so
       the implementation can handle re-assembly */
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  /* Dispatch to the type-specific setvaluesrow implementation (errors if the type lacks one) */
  PetscUseTypeMethod(mat, setvaluesrow, row, v);
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1693: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1694: /*@
1695: MatSetValuesStencil - Inserts or adds a block of values into a matrix.
1696: Using structured grid indexing
1698: Not Collective
1700: Input Parameters:
1701: + mat - the matrix
1702: . m - number of rows being entered
1703: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1704: . n - number of columns being entered
1705: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1706: . v - a logically two-dimensional array of values
1707: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1709: Level: beginner
1711: Notes:
1712: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1714: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1715: options cannot be mixed without intervening calls to the assembly
1716: routines.
1718: The grid coordinates are across the entire grid, not just the local portion
1720: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1721: as well as in C.
1723: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1725: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1726: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1728: The columns and rows in the stencil passed in MUST be contained within the
1729: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1730: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1731: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1732: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1734: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
1735: obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
1736: etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
1737: `DM_BOUNDARY_PERIODIC` boundary type.
1739: For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
1740: a single value per point) you can skip filling those indices.
1742: Inspired by the structured grid interface to the HYPRE package
1743: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1745: Efficiency Alert:
1746: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1747: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1749: Fortran Note:
1750: `idxm` and `idxn` should be declared as
1751: $ MatStencil idxm(4,m),idxn(4,n)
1752: and the values inserted using
1753: .vb
1754: idxm(MatStencil_i,1) = i
1755: idxm(MatStencil_j,1) = j
1756: idxm(MatStencil_k,1) = k
1757: idxm(MatStencil_c,1) = c
1758: etc
1759: .ve
1761: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1762: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1763: @*/
PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
{
  /* buf is used for both converted row and column indices when they fit, avoiding a malloc in the common case */
  PetscInt  buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
  /* dim includes the component "dimension" when dof > 1 (set by MatSetStencil());
     dims skips the slowest entry since it is never multiplied in below */
  PetscInt  j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
  /* The MatStencil structs are walked as a flat array of PetscInt (4 entries per stencil:
     k, j, i, c); sdim is the number of trailing entries actually used */
  PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);

  if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
    jdxm = buf;
    jdxn = buf + m;
  } else {
    PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
    jdxm = bufm;
    jdxn = bufn;
  }
  /* Fold each row stencil (grid coordinates relative to the ghost region start) into a
     single local index; any negative coordinate yields -1, which MatSetValuesLocal() ignores */
  for (i = 0; i < m; i++) {
    for (j = 0; j < 3 - sdim; j++) dxm++; /* skip the unused leading entries of the 4-int stencil */
    tmp = *dxm++ - starts[0];
    for (j = 0; j < dim - 1; j++) {
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1]; /* row-major accumulation over the remaining dimensions */
    }
    if (mat->stencil.noc) dxm++; /* single dof: the c entry is unused, step over it */
    jdxm[i] = tmp;
  }
  /* Same folding for the column stencils */
  for (i = 0; i < n; i++) {
    for (j = 0; j < 3 - sdim; j++) dxn++;
    tmp = *dxn++ - starts[0];
    for (j = 0; j < dim - 1; j++) {
      if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
    }
    if (mat->stencil.noc) dxn++;
    jdxn[i] = tmp;
  }
  PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
  PetscCall(PetscFree2(bufm, bufn)); /* no-ops when the stack buffer was used (bufm/bufn stayed NULL) */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1810: /*@
1811: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix.
1812: Using structured grid indexing
1814: Not Collective
1816: Input Parameters:
1817: + mat - the matrix
1818: . m - number of rows being entered
1819: . idxm - grid coordinates for matrix rows being entered
1820: . n - number of columns being entered
1821: . idxn - grid coordinates for matrix columns being entered
1822: . v - a logically two-dimensional array of values
1823: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1825: Level: beginner
1827: Notes:
1828: By default the values, `v`, are row-oriented and unsorted.
1829: See `MatSetOption()` for other options.
1831: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1832: options cannot be mixed without intervening calls to the assembly
1833: routines.
1835: The grid coordinates are across the entire grid, not just the local portion
1837: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1838: as well as in C.
1840: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1842: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1843: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1845: The columns and rows in the stencil passed in MUST be contained within the
1846: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1847: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1848: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1849: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1851: Negative indices may be passed in idxm and idxn, these rows and columns are
1852: simply ignored. This allows easily inserting element stiffness matrices
1853: with homogeneous Dirichlet boundary conditions that you don't want represented
1854: in the matrix.
1856: Inspired by the structured grid interface to the HYPRE package
1857: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1859: Fortran Note:
1860: `idxm` and `idxn` should be declared as
1861: $ MatStencil idxm(4,m),idxn(4,n)
1862: and the values inserted using
1863: .vb
1864: idxm(MatStencil_i,1) = i
1865: idxm(MatStencil_j,1) = j
1866: idxm(MatStencil_k,1) = k
1867: etc
1868: .ve
1870: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1871: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1872: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1873: @*/
PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
{
  /* buf holds both converted row and column block indices when they fit, avoiding a malloc */
  PetscInt  buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
  PetscInt  j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
  /* MatStencil structs are walked as a flat PetscInt array (4 entries per stencil: k, j, i, c);
     sdim counts the trailing entries in use.  Unlike MatSetValuesStencil(), the component
     entry never contributes here: the block covers all components. */
  PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  PetscAssertPointer(v, 6);

  if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
    jdxm = buf;
    jdxn = buf + m;
  } else {
    PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
    jdxm = bufm;
    jdxn = bufn;
  }
  /* Fold each row stencil (grid coordinates relative to the ghost region start) into a single
     local block index; a negative coordinate yields -1, which MatSetValuesBlockedLocal() ignores.
     Note the inner loop runs to sdim - 1 (not dim - 1): the component entry is excluded. */
  for (i = 0; i < m; i++) {
    for (j = 0; j < 3 - sdim; j++) dxm++; /* skip unused leading entries of the 4-int stencil */
    tmp = *dxm++ - starts[0];
    for (j = 0; j < sdim - 1; j++) {
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1]; /* row-major accumulation */
    }
    dxm++; /* always step over the trailing c entry */
    jdxm[i] = tmp;
  }
  /* Same folding for the column stencils */
  for (i = 0; i < n; i++) {
    for (j = 0; j < 3 - sdim; j++) dxn++;
    tmp = *dxn++ - starts[0];
    for (j = 0; j < sdim - 1; j++) {
      if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
    }
    dxn++;
    jdxn[i] = tmp;
  }
  PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
  PetscCall(PetscFree2(bufm, bufn)); /* no-ops when the stack buffer was used */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1921: /*@
1922: MatSetStencil - Sets the grid information for setting values into a matrix via
1923: `MatSetValuesStencil()`
1925: Not Collective
1927: Input Parameters:
1928: + mat - the matrix
1929: . dim - dimension of the grid 1, 2, or 3
1930: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1931: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1932: - dof - number of degrees of freedom per node
1934: Level: beginner
1936: Notes:
1937: Inspired by the structured grid interface to the HYPRE package
1938: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1940: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1941: user.
1943: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1944: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1945: @*/
1946: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1947: {
1948: PetscFunctionBegin;
1950: PetscAssertPointer(dims, 3);
1951: PetscAssertPointer(starts, 4);
1953: mat->stencil.dim = dim + (dof > 1);
1954: for (PetscInt i = 0; i < dim; i++) {
1955: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1956: mat->stencil.starts[i] = starts[dim - i - 1];
1957: }
1958: mat->stencil.dims[dim] = dof;
1959: mat->stencil.starts[dim] = 0;
1960: mat->stencil.noc = (PetscBool)(dof == 1);
1961: PetscFunctionReturn(PETSC_SUCCESS);
1962: }
1964: /*@
1965: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1967: Not Collective
1969: Input Parameters:
1970: + mat - the matrix
1971: . v - a logically two-dimensional array of values
1972: . m - the number of block rows
1973: . idxm - the global block indices
1974: . n - the number of block columns
1975: . idxn - the global block indices
1976: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
1978: Level: intermediate
1980: Notes:
1981: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
1982: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
1984: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
1985: NOT the total number of rows/columns; for example, if the block size is 2 and
1986: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
1987: The values in `idxm` would be 1 2; that is the first index for each block divided by
1988: the block size.
1990: You must call `MatSetBlockSize()` when constructing this matrix (before
1991: preallocating it).
1993: By default the values, `v`, are row-oriented, so the layout of
1994: `v` is the same as for `MatSetValues()`. See `MatSetOption()` for other options.
1996: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
1997: options cannot be mixed without intervening calls to the assembly
1998: routines.
2000: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
2001: as well as in C.
2003: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
2004: simply ignored. This allows easily inserting element stiffness matrices
2005: with homogeneous Dirichlet boundary conditions that you don't want represented
2006: in the matrix.
2008: Each time an entry is set within a sparse matrix via `MatSetValues()`,
2009: internal searching must be done to determine where to place the
2010: data in the matrix storage space. By instead inserting blocks of
2011: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2012: reduced.
2014: Example:
2015: .vb
2016: Suppose m=n=2 and block size(bs) = 2 The array is
2018: 1 2 | 3 4
2019: 5 6 | 7 8
2020: - - - | - - -
2021: 9 10 | 11 12
2022: 13 14 | 15 16
2024: v[] should be passed in like
2025: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
2027: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2028: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2029: .ve
2031: Fortran Notes:
2032: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
2033: .vb
2034: MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES)
2035: .ve
2037: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2039: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2040: @*/
PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscFunctionBeginHot;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  MatCheckPreallocated(mat, 1);
  /* First call fixes the insert mode; later calls must match until an assembly resets it */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    /* either a native blocked implementation or the point-wise fallback below must exist */
    PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
  }
  if (PetscDefined(USE_DEBUG)) {
    /* Bounds-check the block indices against the global matrix dimensions */
    PetscInt rbs, cbs, M, N, i;
    PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
    PetscCall(MatGetSize(mat, &M, &N));
    for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
    for (i = 0; i < n; i++)
      PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
  }
  if (mat->assembled) {
    /* new values invalidate the assembled state; remember it for incremental re-assembly */
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  if (mat->ops->setvaluesblocked) {
    PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
  } else {
    /* Fallback: expand every block index into its bs (resp. cbs) constituent point
       indices and insert through MatSetValues() */
    PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
    PetscInt i, j, bs, cbs;

    PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
    /* use the stack buffer for the expanded indices when they fit */
    if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
      iidxm = buf;
      iidxn = buf + m * bs;
    } else {
      PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
      iidxm = bufr;
      iidxn = bufc;
    }
    for (i = 0; i < m; i++) {
      for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
    }
    if (m != n || bs != cbs || idxm != idxn) {
      for (i = 0; i < n; i++) {
        for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
      }
    } else iidxn = iidxm; /* identical row/column blocks: reuse the expanded row indices */
    PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
    PetscCall(PetscFree2(bufr, bufc)); /* no-ops when the stack buffer was used */
  }
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2099: /*@
2100: MatGetValues - Gets a block of local values from a matrix.
2102: Not Collective; can only return values that are owned by the given process
2104: Input Parameters:
2105: + mat - the matrix
2106: . v - a logically two-dimensional array for storing the values
2107: . m - the number of rows
2108: . idxm - the global indices of the rows
2109: . n - the number of columns
2110: - idxn - the global indices of the columns
2112: Level: advanced
2114: Notes:
2115: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2116: The values, `v`, are then returned in a row-oriented format,
2117: analogous to that used by default in `MatSetValues()`.
2119: `MatGetValues()` uses 0-based row and column numbers in
2120: Fortran as well as in C.
2122: `MatGetValues()` requires that the matrix has been assembled
2123: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2124: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2125: without intermediate matrix assembly.
2127: Negative row or column indices will be ignored and those locations in `v` will be
2128: left unchanged.
2130: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2131: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2132: from `MatGetOwnershipRange`(mat,&rstart,&rend).
2134: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2135: @*/
PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
{
  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* nothing requested */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  PetscAssertPointer(v, 6);
  /* values can only be read from a fully assembled, unfactored matrix */
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
  /* dispatch to the type-specific getvalues implementation (errors if the type lacks one) */
  PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
  PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2155: /*@
2156: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2157: defined previously by `MatSetLocalToGlobalMapping()`
2159: Not Collective
2161: Input Parameters:
2162: + mat - the matrix
2163: . nrow - number of rows
2164: . irow - the row local indices
2165: . ncol - number of columns
2166: - icol - the column local indices
2168: Output Parameter:
2169: . y - a logically two-dimensional array of values
2171: Level: advanced
2173: Notes:
2174: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2176: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2177: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2178: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2179: with `MatSetLocalToGlobalMapping()`.
2181: Developer Note:
2182: This is labelled with C so does not automatically generate Fortran stubs and interfaces
2183: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2185: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2186: `MatSetValuesLocal()`, `MatGetValues()`
2187: @*/
PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
{
  PetscFunctionBeginHot;
  MatCheckPreallocated(mat, 1);
  if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
  PetscAssertPointer(irow, 3);
  PetscAssertPointer(icol, 5);
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    /* either a native local implementation or the global fallback below must exist */
    PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
  }
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
  if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
  else {
    /* Fallback: translate the local indices to global ones and call MatGetValues().
       A stack buffer holds the translated indices when they fit, avoiding a malloc. */
    PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
    if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
      irowm = buf;
      icolm = buf + nrow;
    } else {
      PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
      irowm = bufr;
      icolm = bufc;
    }
    /* the fallback needs both local-to-global mappings; fail with a helpful message otherwise */
    PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
    PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
    PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
    PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
    PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
    PetscCall(PetscFree2(bufr, bufc)); /* no-ops when the stack buffer was used */
  }
  PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2225: /*@
2226: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2227: the same size. Currently, this can only be called once and creates the given matrix.
2229: Not Collective
2231: Input Parameters:
2232: + mat - the matrix
2233: . nb - the number of blocks
2234: . bs - the number of rows (and columns) in each block
2235: . rows - a concatenation of the rows for each block
2236: - v - a concatenation of logically two-dimensional arrays of values
2238: Level: advanced
2240: Notes:
2241: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2243: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
2245: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2246: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2247: @*/
2248: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2249: {
2250: PetscFunctionBegin;
2253: PetscAssertPointer(rows, 4);
2254: PetscAssertPointer(v, 5);
2255: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2257: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2258: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2259: else {
2260: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2261: }
2262: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2263: PetscFunctionReturn(PETSC_SUCCESS);
2264: }
2266: /*@
2267: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2268: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2269: using a local (per-processor) numbering.
2271: Not Collective
2273: Input Parameters:
2274: + x - the matrix
2275: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2276: - cmapping - column mapping
2278: Level: intermediate
2280: Note:
2281: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
2283: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2284: @*/
2285: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2286: {
2287: PetscFunctionBegin;
2292: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2293: else {
2294: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2295: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2296: }
2297: PetscFunctionReturn(PETSC_SUCCESS);
2298: }
2300: /*@
2301: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2303: Not Collective
2305: Input Parameter:
2306: . A - the matrix
2308: Output Parameters:
2309: + rmapping - row mapping
2310: - cmapping - column mapping
2312: Level: advanced
2314: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2315: @*/
2316: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2317: {
2318: PetscFunctionBegin;
2321: if (rmapping) {
2322: PetscAssertPointer(rmapping, 2);
2323: *rmapping = A->rmap->mapping;
2324: }
2325: if (cmapping) {
2326: PetscAssertPointer(cmapping, 3);
2327: *cmapping = A->cmap->mapping;
2328: }
2329: PetscFunctionReturn(PETSC_SUCCESS);
2330: }
2332: /*@
2333: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2335: Logically Collective
2337: Input Parameters:
2338: + A - the matrix
2339: . rmap - row layout
2340: - cmap - column layout
2342: Level: advanced
2344: Note:
2345: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2347: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2348: @*/
2349: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2350: {
2351: PetscFunctionBegin;
2353: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2354: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2355: PetscFunctionReturn(PETSC_SUCCESS);
2356: }
2358: /*@
2359: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2361: Not Collective
2363: Input Parameter:
2364: . A - the matrix
2366: Output Parameters:
2367: + rmap - row layout
2368: - cmap - column layout
2370: Level: advanced
2372: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2373: @*/
2374: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2375: {
2376: PetscFunctionBegin;
2379: if (rmap) {
2380: PetscAssertPointer(rmap, 2);
2381: *rmap = A->rmap;
2382: }
2383: if (cmap) {
2384: PetscAssertPointer(cmap, 3);
2385: *cmap = A->cmap;
2386: }
2387: PetscFunctionReturn(PETSC_SUCCESS);
2388: }
2390: /*@
2391: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2392: using a local numbering of the rows and columns.
2394: Not Collective
2396: Input Parameters:
2397: + mat - the matrix
2398: . nrow - number of rows
2399: . irow - the row local indices
2400: . ncol - number of columns
2401: . icol - the column local indices
2402: . y - a logically two-dimensional array of values
2403: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2405: Level: intermediate
2407: Notes:
2408: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2410: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2411: options cannot be mixed without intervening calls to the assembly
2412: routines.
2414: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2415: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
2417: Fortran Notes:
2418: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2419: .vb
2420: MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES)
2421: .ve
2423: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2425: Developer Note:
2426: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2427: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2429: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2430: `MatGetValuesLocal()`
2431: @*/
2432: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2433: {
2434: PetscFunctionBeginHot;
2437: MatCheckPreallocated(mat, 1);
2438: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2439: PetscAssertPointer(irow, 3);
2440: PetscAssertPointer(icol, 5);
/* Latch the insert mode on first use; subsequent calls must use the same mode until the matrix is assembled */
2441: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2442: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2443: if (PetscDefined(USE_DEBUG)) {
2444: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2445: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2446: }
/* Inserting into an already-assembled matrix puts it back into an unassembled state */
2448: if (mat->assembled) {
2449: mat->was_assembled = PETSC_TRUE;
2450: mat->assembled = PETSC_FALSE;
2451: }
2452: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
/* Prefer a type-specific local-insert kernel; otherwise translate local->global indices here and call MatSetValues() */
2453: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2454: else {
/* Small index sets use the stack buffer (rows in buf[0..nrow), columns after them); larger ones heap-allocate */
2455: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2456: const PetscInt *irowm, *icolm;
2458: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2459: bufr = buf;
2460: bufc = buf + nrow;
2461: irowm = bufr;
2462: icolm = bufc;
2463: } else {
2464: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2465: irowm = bufr;
2466: icolm = bufc;
2467: }
/* Without a mapping the indices are passed through unchanged */
2468: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2469: else irowm = irow;
2470: if (mat->cmap->mapping) {
/* Reuse the already-mapped row indices when the column request is identical (same mapping, counts, and index array) */
2471: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2472: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2473: } else icolm = irowm;
2474: } else icolm = icol;
2475: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2476: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2477: }
2478: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2479: PetscFunctionReturn(PETSC_SUCCESS);
2480: }
2482: /*@
2483: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2484: using a local ordering of the nodes a block at a time.
2486: Not Collective
2488: Input Parameters:
2489: + mat - the matrix
2490: . nrow - number of rows
2491: . irow - the row local indices
2492: . ncol - number of columns
2493: . icol - the column local indices
2494: . y - a logically two-dimensional array of values
2495: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2497: Level: intermediate
2499: Notes:
2500: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2501: before using this routine.
2503: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2504: options cannot be mixed without intervening calls to the assembly
2505: routines.
2507: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2508: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2510: Fortran Notes:
2511: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2512: .vb
2513: MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES)
2514: .ve
2516: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2518: Developer Note:
2519: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2520: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2522: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2523: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2524: @*/
2525: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2526: {
2527: PetscFunctionBeginHot;
2530: MatCheckPreallocated(mat, 1);
2531: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2532: PetscAssertPointer(irow, 3);
2533: PetscAssertPointer(icol, 5);
/* Latch the insert mode on first use; ADD_VALUES and INSERT_VALUES cannot be mixed between assemblies */
2534: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2535: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2536: if (PetscDefined(USE_DEBUG)) {
2537: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2538: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2539: }
/* Inserting into an already-assembled matrix puts it back into an unassembled state */
2541: if (mat->assembled) {
2542: mat->was_assembled = PETSC_TRUE;
2543: mat->assembled = PETSC_FALSE;
2544: }
/* Debug-only: the matrix block sizes must agree with the block sizes of the local-to-global mappings */
2545: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2546: PetscInt irbs, rbs;
2547: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2548: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2549: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2550: }
2551: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2552: PetscInt icbs, cbs;
2553: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2554: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2555: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2556: }
2557: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
/* Prefer the type-specific blocked-local kernel; otherwise map block indices here and call MatSetValuesBlocked() */
2558: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2559: else {
/* Small index sets use the stack buffer (rows first, columns after them); larger ones heap-allocate */
2560: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2561: const PetscInt *irowm, *icolm;
2563: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2564: bufr = buf;
2565: bufc = buf + nrow;
2566: irowm = bufr;
2567: icolm = bufc;
2568: } else {
2569: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2570: irowm = bufr;
2571: icolm = bufc;
2572: }
/* Block (not point) index translation; without a mapping the indices pass through unchanged */
2573: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2574: else irowm = irow;
2575: if (mat->cmap->mapping) {
/* Reuse the already-mapped row indices when the column request is identical (same mapping, counts, and index array) */
2576: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2577: PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2578: } else icolm = irowm;
2579: } else icolm = icol;
2580: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2581: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2582: }
2583: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2584: PetscFunctionReturn(PETSC_SUCCESS);
2585: }
2587: /*@
2588: MatMultDiagonalBlock - Computes the matrix-vector product, $y = Dx$. Where `D` is defined by the inode or block structure of the diagonal
2590: Collective
2592: Input Parameters:
2593: + mat - the matrix
2594: - x - the vector to be multiplied
2596: Output Parameter:
2597: . y - the result
2599: Level: developer
2601: Note:
2602: The vectors `x` and `y` cannot be the same. I.e., one cannot
2603: call `MatMultDiagonalBlock`(A,y,y).
2605: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2606: @*/
2607: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2608: {
2609: PetscFunctionBegin;
/* Valid only on an assembled, unfactored matrix; x and y must be distinct because the kernel writes y while reading x */
2615: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2616: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2617: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2618: MatCheckPreallocated(mat, 1);
2620: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
/* Bump y's state so cached norms/reductions are invalidated */
2621: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2622: PetscFunctionReturn(PETSC_SUCCESS);
2623: }
2625: /*@
2626: MatMult - Computes the matrix-vector product, $y = Ax$.
2628: Neighbor-wise Collective
2630: Input Parameters:
2631: + mat - the matrix
2632: - x - the vector to be multiplied
2634: Output Parameter:
2635: . y - the result
2637: Level: beginner
2639: Note:
2640: The vectors `x` and `y` cannot be the same. I.e., one cannot
2641: call `MatMult`(A,y,y).
2643: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2644: @*/
2645: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2646: {
2647: PetscFunctionBegin;
2651: VecCheckAssembled(x);
2653: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2654: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2655: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
/* y = A*x requires x compatible with the column layout and y with the row layout, globally and locally */
2656: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2657: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2658: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2659: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2660: PetscCall(VecSetErrorIfLocked(y, 3));
/* Optional NaN/Inf screening of the input when MatSetErrorIfFailure() was enabled */
2661: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2662: MatCheckPreallocated(mat, 1);
/* Lock x read-only for the duration of the kernel so the implementation cannot modify it */
2664: PetscCall(VecLockReadPush(x));
2665: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2666: PetscUseTypeMethod(mat, mult, x, y);
2667: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2668: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2669: PetscCall(VecLockReadPop(x));
2670: PetscFunctionReturn(PETSC_SUCCESS);
2671: }
2673: /*@
2674: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2676: Neighbor-wise Collective
2678: Input Parameters:
2679: + mat - the matrix
2680: - x - the vector to be multiplied
2682: Output Parameter:
2683: . y - the result
2685: Level: beginner
2687: Notes:
2688: The vectors `x` and `y` cannot be the same. I.e., one cannot
2689: call `MatMultTranspose`(A,y,y).
2691: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiple,
2692: use `MatMultHermitianTranspose()`
2694: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2695: @*/
2696: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2697: {
2698: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2700: PetscFunctionBegin;
2704: VecCheckAssembled(x);
2707: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2708: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2709: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
/* Transposed product: x matches the row layout, y matches the column layout (reverse of MatMult) */
2710: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2711: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2712: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2713: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2714: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2715: MatCheckPreallocated(mat, 1);
/* No transpose kernel: fall back on the plain multiply, but only when the matrix is known symmetric (A^T = A) */
2717: if (!mat->ops->multtranspose) {
2718: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2719: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2720: } else op = mat->ops->multtranspose;
2721: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2722: PetscCall(VecLockReadPush(x));
2723: PetscCall((*op)(mat, x, y));
2724: PetscCall(VecLockReadPop(x));
2725: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2726: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2727: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2728: PetscFunctionReturn(PETSC_SUCCESS);
2729: }
2731: /*@
2732: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2734: Neighbor-wise Collective
2736: Input Parameters:
2737: + mat - the matrix
2738: - x - the vector to be multiplied
2740: Output Parameter:
2741: . y - the result
2743: Level: beginner
2745: Notes:
2746: The vectors `x` and `y` cannot be the same. I.e., one cannot
2747: call `MatMultHermitianTranspose`(A,y,y).
2749: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2751: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2753: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2754: @*/
2755: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2756: {
2757: PetscFunctionBegin;
2763: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2764: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2765: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
/* Hermitian-transposed product: x matches the row layout, y matches the column layout */
2766: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2767: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2768: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2769: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2770: MatCheckPreallocated(mat, 1);
2772: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2773: #if defined(PETSC_USE_COMPLEX)
/* Use the dedicated kernel, or the plain multiply when the matrix is known Hermitian (A^H = A) */
2774: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2775: PetscCall(VecLockReadPush(x));
2776: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2777: else PetscUseTypeMethod(mat, mult, x, y);
2778: PetscCall(VecLockReadPop(x));
2779: } else {
/* Fallback: A^H x = conj(A^T conj(x)); conjugate a work copy of x, apply the transpose, conjugate the result */
2780: Vec w;
2781: PetscCall(VecDuplicate(x, &w));
2782: PetscCall(VecCopy(x, w));
2783: PetscCall(VecConjugate(w));
2784: PetscCall(MatMultTranspose(mat, w, y));
2785: PetscCall(VecDestroy(&w));
2786: PetscCall(VecConjugate(y));
2787: }
2788: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2789: #else
/* Real scalars: the Hermitian transpose is the ordinary transpose */
2790: PetscCall(MatMultTranspose(mat, x, y));
2791: #endif
2792: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2793: PetscFunctionReturn(PETSC_SUCCESS);
2794: }
2796: /*@
2797: MatMultAdd - Computes $v3 = v2 + A * v1$.
2799: Neighbor-wise Collective
2801: Input Parameters:
2802: + mat - the matrix
2803: . v1 - the vector to be multiplied by `mat`
2804: - v2 - the vector to be added to the result
2806: Output Parameter:
2807: . v3 - the result
2809: Level: beginner
2811: Note:
2812: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2813: call `MatMultAdd`(A,v1,v2,v1).
2815: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2816: @*/
2817: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2818: {
2819: PetscFunctionBegin;
2826: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2827: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
/* v3 = v2 + A*v1: v1 matches the column layout; v2/v3 match the row layout (only local sizes are checked here; the global checks below were deliberately disabled) */
2828: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2829: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2830: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2831: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2832: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
/* v2 == v3 is permitted (accumulate in place); only v1 == v3 is forbidden */
2833: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2834: MatCheckPreallocated(mat, 1);
2836: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2837: PetscCall(VecLockReadPush(v1));
2838: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2839: PetscCall(VecLockReadPop(v1));
2840: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2841: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2842: PetscFunctionReturn(PETSC_SUCCESS);
2843: }
2845: /*@
2846: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2848: Neighbor-wise Collective
2850: Input Parameters:
2851: + mat - the matrix
2852: . v1 - the vector to be multiplied by the transpose of the matrix
2853: - v2 - the vector to be added to the result
2855: Output Parameter:
2856: . v3 - the result
2858: Level: beginner
2860: Note:
2861: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2862: call `MatMultTransposeAdd`(A,v1,v2,v1).
2864: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2865: @*/
2866: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2867: {
/* No transpose-add kernel: fall back on multadd (v3 = v2 + A*v1), but only when the matrix is KNOWN
   symmetric. Compare against PETSC_BOOL3_TRUE explicitly, as MatMultTranspose() does: PetscBool3 is a
   three-state value and PETSC_BOOL3_UNKNOWN is nonzero, so a plain truth test would wrongly treat an
   unknown-symmetry matrix as symmetric. */
2868: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric == PETSC_BOOL3_TRUE) ? mat->ops->multadd : mat->ops->multtransposeadd;
2870: PetscFunctionBegin;
2877: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2878: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
/* v3 = v2 + A^T*v1: v1 matches the row layout; v2/v3 match the column layout */
2879: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2880: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2881: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2882: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2883: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2884: MatCheckPreallocated(mat, 1);
2886: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2887: PetscCall(VecLockReadPush(v1));
2888: PetscCall((*op)(mat, v1, v2, v3));
2889: PetscCall(VecLockReadPop(v1));
2890: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2891: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2892: PetscFunctionReturn(PETSC_SUCCESS);
2893: }
2895: /*@
2896: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2898: Neighbor-wise Collective
2900: Input Parameters:
2901: + mat - the matrix
2902: . v1 - the vector to be multiplied by the Hermitian transpose
2903: - v2 - the vector to be added to the result
2905: Output Parameter:
2906: . v3 - the result
2908: Level: beginner
2910: Note:
2911: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2912: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2914: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2915: @*/
2916: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2917: {
2918: PetscFunctionBegin;
2925: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2926: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2927: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
/* v3 = v2 + A^H*v1: v1 matches the row layout; v2/v3 match the column layout */
2928: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2929: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2930: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2931: MatCheckPreallocated(mat, 1);
2933: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2934: PetscCall(VecLockReadPush(v1));
2935: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2936: else {
/* Fallback: z = A^H v1 = conj(A^T conj(v1)), then v3 = v2 + z (in place when v2 == v3) */
2937: Vec w, z;
2938: PetscCall(VecDuplicate(v1, &w));
2939: PetscCall(VecCopy(v1, w));
2940: PetscCall(VecConjugate(w));
2941: PetscCall(VecDuplicate(v3, &z));
2942: PetscCall(MatMultTranspose(mat, w, z));
2943: PetscCall(VecDestroy(&w));
2944: PetscCall(VecConjugate(z));
2945: if (v2 != v3) {
2946: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2947: } else {
2948: PetscCall(VecAXPY(v3, 1.0, z));
2949: }
2950: PetscCall(VecDestroy(&z));
2951: }
2952: PetscCall(VecLockReadPop(v1));
2953: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2954: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2955: PetscFunctionReturn(PETSC_SUCCESS);
2956: }
2958: /*@
2959: MatGetFactorType - gets the type of factorization a matrix is
2961: Not Collective
2963: Input Parameter:
2964: . mat - the matrix
2966: Output Parameter:
2967: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC,MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2969: Level: intermediate
2971: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2972: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2973: @*/
2974: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
2975: {
2976: PetscFunctionBegin;
/* Simple accessor: report the stored factorization type (MAT_FACTOR_NONE for an unfactored matrix) */
2979: PetscAssertPointer(t, 2);
2980: *t = mat->factortype;
2981: PetscFunctionReturn(PETSC_SUCCESS);
2982: }
2984: /*@
2985: MatSetFactorType - sets the type of factorization a matrix is
2987: Logically Collective
2989: Input Parameters:
2990: + mat - the matrix
2991: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC,MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2993: Level: intermediate
2995: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2996: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2997: @*/
2998: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
2999: {
3000: PetscFunctionBegin;
/* Simple mutator: overwrite the stored factorization type; no validation or side effects */
3003: mat->factortype = t;
3004: PetscFunctionReturn(PETSC_SUCCESS);
3005: }
3007: /*@
3008: MatGetInfo - Returns information about matrix storage (number of
3009: nonzeros, memory, etc.).
3011: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3013: Input Parameters:
3014: + mat - the matrix
3015: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3017: Output Parameter:
3018: . info - matrix information context
3020: Options Database Key:
3021: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3023: Level: intermediate
3025: Notes:
3026: The `MatInfo` context contains a variety of matrix data, including
3027: number of nonzeros allocated and used, number of mallocs during
3028: matrix assembly, etc. Additional information for factored matrices
3029: is provided (such as the fill ratio, number of mallocs during
3030: factorization, etc.).
3032: Example:
3033: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3034: data within the `MatInfo` context. For example,
3035: .vb
3036: MatInfo info;
3037: Mat A;
3038: double mal, nz_a, nz_u;
3040: MatGetInfo(A, MAT_LOCAL, &info);
3041: mal = info.mallocs;
3042: nz_a = info.nz_allocated;
3043: .ve
3045: Fortran Note:
3046: Declare info as a `MatInfo` array of dimension `MAT_INFO_SIZE`, and then extract the parameters
3047: of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h
3048: a complete list of parameter names.
3049: .vb
3050: MatInfo info(MAT_INFO_SIZE)
3051: double precision mal, nz_a
3052: Mat A
3053: integer ierr
3055: call MatGetInfo(A, MAT_LOCAL, info, ierr)
3056: mal = info(MAT_INFO_MALLOCS)
3057: nz_a = info(MAT_INFO_NZ_ALLOCATED)
3058: .ve
3060: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3061: @*/
3062: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3063: {
3064: PetscFunctionBegin;
/* Delegate entirely to the type-specific getinfo kernel; errors with PETSC_ERR_SUP if the type supplies none */
3067: PetscAssertPointer(info, 3);
3068: MatCheckPreallocated(mat, 1);
3069: PetscUseTypeMethod(mat, getinfo, flag, info);
3070: PetscFunctionReturn(PETSC_SUCCESS);
3071: }
3073: /*
3074: This is used by external packages where it is not easy to get the info from the actual
3075: matrix factorization.
3076: */
3077: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3078: {
3079: PetscFunctionBegin;
/* Placeholder getinfo for external-package factorizations: no real data is available, so return an all-zero MatInfo */
3080: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3081: PetscFunctionReturn(PETSC_SUCCESS);
3082: }
3084: /*@
3085: MatLUFactor - Performs in-place LU factorization of matrix.
3087: Collective
3089: Input Parameters:
3090: + mat - the matrix
3091: . row - row permutation
3092: . col - column permutation
3093: - info - options for factorization, includes
3094: .vb
3095: fill - expected fill as ratio of original fill.
3096: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3097: Run with the option -info to determine an optimal value to use
3098: .ve
3100: Level: developer
3102: Notes:
3103: Most users should employ the `KSP` interface for linear solvers
3104: instead of working directly with matrix algebra routines such as this.
3105: See, e.g., `KSPCreate()`.
3107: This changes the state of the matrix to a factored matrix; it cannot be used
3108: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3110: This is really in-place only for dense matrices, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3111: when not using `KSP`.
3113: Developer Note:
3114: The Fortran interface is not autogenerated as the
3115: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3117: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3118: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3119: @*/
3120: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3121: {
3122: MatFactorInfo tinfo;
3124: PetscFunctionBegin;
3128: if (info) PetscAssertPointer(info, 4);
3130: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3131: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3132: MatCheckPreallocated(mat, 1);
/* NULL info is allowed: substitute a default-initialized MatFactorInfo from the stack */
3133: if (!info) {
3134: PetscCall(MatFactorInfoInitialize(&tinfo));
3135: info = &tinfo;
3136: }
3138: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3139: PetscUseTypeMethod(mat, lufactor, row, col, info);
3140: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
/* The matrix contents changed (it is now a factored matrix); bump its state */
3141: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3142: PetscFunctionReturn(PETSC_SUCCESS);
3143: }
3145: /*@
3146: MatILUFactor - Performs in-place ILU factorization of matrix.
3148: Collective
3150: Input Parameters:
3151: + mat - the matrix
3152: . row - row permutation
3153: . col - column permutation
3154: - info - structure containing
3155: .vb
3156: levels - number of levels of fill.
3157: expected fill - as ratio of original fill.
3158: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3159: missing diagonal entries)
3160: .ve
3162: Level: developer
3164: Notes:
3165: Most users should employ the `KSP` interface for linear solvers
3166: instead of working directly with matrix algebra routines such as this.
3167: See, e.g., `KSPCreate()`.
3169: Probably really in-place only when level of fill is zero, otherwise allocates
3170: new space to store factored matrix and deletes previous memory. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3171: when not using `KSP`.
3173: Developer Note:
3174: The Fortran interface is not autogenerated as the
3175: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3177: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3178: @*/
3179: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3180: {
  MatFactorInfo tinfo; /* default factorization options, used only when the caller passes info == NULL */
3181: PetscFunctionBegin;
3185: if (info) PetscAssertPointer(info, 4); /* info is optional, for consistency with MatLUFactor() and MatCholeskyFactor() */
3187: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3188: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3189: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3190: MatCheckPreallocated(mat, 1);
  if (!info) { /* substitute default options, as the other in-place factorization routines do */
    PetscCall(MatFactorInfoInitialize(&tinfo));
    info = &tinfo;
  }
3192: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3193: PetscUseTypeMethod(mat, ilufactor, row, col, info); /* dispatch to the matrix type's in-place ILU implementation */
3194: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3195: PetscCall(PetscObjectStateIncrease((PetscObject)mat)); /* matrix contents changed: invalidate dependent caches */
3196: PetscFunctionReturn(PETSC_SUCCESS);
3197: }
3199: /*@
3200: MatLUFactorSymbolic - Performs symbolic LU factorization of matrix.
3201: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3203: Collective
3205: Input Parameters:
3206: + fact - the factor matrix obtained with `MatGetFactor()`
3207: . mat - the matrix
3208: . row - the row permutation
3209: . col - the column permutation
3210: - info - options for factorization, includes
3211: .vb
3212: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3213: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3214: .ve
3216: Level: developer
3218: Notes:
3219: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3221: Most users should employ the simplified `KSP` interface for linear solvers
3222: instead of working directly with matrix algebra routines such as this.
3223: See, e.g., `KSPCreate()`.
3225: Developer Note:
3226: The Fortran interface is not autogenerated as the
3227: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3229: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3230: @*/
3231: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3232: {
3233: MatFactorInfo tinfo; /* default factorization options, used only when info == NULL */
3235: PetscFunctionBegin;
3240: if (info) PetscAssertPointer(info, 5); /* info is optional */
3243: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3244: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3245: MatCheckPreallocated(mat, 2);
3246: if (!info) {
3247: PetscCall(MatFactorInfoInitialize(&tinfo)); /* substitute default options */
3248: info = &tinfo;
3249: }
3251: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0)); /* skip logging when the symbolic phase is trivial for this factor type */
3252: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info); /* symbolic phase runs on fact, analyzing mat's nonzero structure */
3253: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3254: PetscCall(PetscObjectStateIncrease((PetscObject)fact)); /* fact (not mat) is modified here */
3255: PetscFunctionReturn(PETSC_SUCCESS);
3256: }
3258: /*@
3259: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3260: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3262: Collective
3264: Input Parameters:
3265: + fact - the factor matrix obtained with `MatGetFactor()`
3266: . mat - the matrix
3267: - info - options for factorization
3269: Level: developer
3271: Notes:
3272: See `MatLUFactor()` for in-place factorization. See
3273: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3275: Most users should employ the `KSP` interface for linear solvers
3276: instead of working directly with matrix algebra routines such as this.
3277: See, e.g., `KSPCreate()`.
3279: Developer Note:
3280: The Fortran interface is not autogenerated as the
3281: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3283: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3284: @*/
3285: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3286: {
3287: MatFactorInfo tinfo; /* default factorization options, used only when info == NULL */
3289: PetscFunctionBegin;
3294: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3295: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3296: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N); /* fact must match mat's global dimensions (set up by the symbolic phase) */
3298: MatCheckPreallocated(mat, 2);
3299: if (!info) {
3300: PetscCall(MatFactorInfoInitialize(&tinfo)); /* substitute default options */
3301: info = &tinfo;
3302: }
3304: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3305: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0)); /* trivial symbolic phase: attribute the time to the combined LU event */
3306: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3307: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3308: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3309: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view")); /* optionally display the factored matrix via the options database */
3310: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3311: PetscFunctionReturn(PETSC_SUCCESS);
3312: }
3314: /*@
3315: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3316: symmetric matrix.
3318: Collective
3320: Input Parameters:
3321: + mat - the matrix
3322: . perm - row and column permutations
3323: - info - expected fill as ratio of original fill
3325: Level: developer
3327: Notes:
3328: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3329: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3331: Most users should employ the `KSP` interface for linear solvers
3332: instead of working directly with matrix algebra routines such as this.
3333: See, e.g., `KSPCreate()`.
3335: Developer Note:
3336: The Fortran interface is not autogenerated as the
3337: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3339: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`
3340: `MatGetOrdering()`
3341: @*/
3342: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3343: {
3344: MatFactorInfo tinfo; /* default factorization options, used only when info == NULL */
3346: PetscFunctionBegin;
3349: if (info) PetscAssertPointer(info, 3); /* info is optional */
3351: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square"); /* Cholesky requires a square (symmetric) matrix */
3352: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3353: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3354: MatCheckPreallocated(mat, 1);
3355: if (!info) {
3356: PetscCall(MatFactorInfoInitialize(&tinfo)); /* substitute default options */
3357: info = &tinfo;
3358: }
3360: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3361: PetscUseTypeMethod(mat, choleskyfactor, perm, info); /* dispatch to the matrix type's in-place Cholesky implementation */
3362: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3363: PetscCall(PetscObjectStateIncrease((PetscObject)mat)); /* matrix contents changed in place */
3364: PetscFunctionReturn(PETSC_SUCCESS);
3365: }
3367: /*@
3368: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3369: of a symmetric matrix.
3371: Collective
3373: Input Parameters:
3374: + fact - the factor matrix obtained with `MatGetFactor()`
3375: . mat - the matrix
3376: . perm - row and column permutations
3377: - info - options for factorization, includes
3378: .vb
3379: fill - expected fill as ratio of original fill.
3380: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3381: Run with the option -info to determine an optimal value to use
3382: .ve
3384: Level: developer
3386: Notes:
3387: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3388: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3390: Most users should employ the `KSP` interface for linear solvers
3391: instead of working directly with matrix algebra routines such as this.
3392: See, e.g., `KSPCreate()`.
3394: Developer Note:
3395: The Fortran interface is not autogenerated as the
3396: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3398: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`
3399: `MatGetOrdering()`
3400: @*/
3401: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3402: {
3403: MatFactorInfo tinfo; /* default factorization options, used only when info == NULL */
3405: PetscFunctionBegin;
3409: if (info) PetscAssertPointer(info, 4); /* info is optional */
3412: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3413: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3414: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3415: MatCheckPreallocated(mat, 2);
3416: if (!info) {
3417: PetscCall(MatFactorInfoInitialize(&tinfo)); /* substitute default options */
3418: info = &tinfo;
3419: }
3421: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0)); /* skip logging when the symbolic phase is trivial */
3422: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3423: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3424: PetscCall(PetscObjectStateIncrease((PetscObject)fact)); /* fact (not mat) is modified here */
3425: PetscFunctionReturn(PETSC_SUCCESS);
3426: }
3428: /*@
3429: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3430: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3431: `MatCholeskyFactorSymbolic()`.
3433: Collective
3435: Input Parameters:
3436: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3437: . mat - the initial matrix that is to be factored
3438: - info - options for factorization
3440: Level: developer
3442: Note:
3443: Most users should employ the `KSP` interface for linear solvers
3444: instead of working directly with matrix algebra routines such as this.
3445: See, e.g., `KSPCreate()`.
3447: Developer Note:
3448: The Fortran interface is not autogenerated as the
3449: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3451: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3452: @*/
3453: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3454: {
3455: MatFactorInfo tinfo; /* default factorization options, used only when info == NULL */
3457: PetscFunctionBegin;
3462: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3463: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3464: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N); /* fact must match mat's global dimensions (set up by the symbolic phase) */
3465: MatCheckPreallocated(mat, 2);
3466: if (!info) {
3467: PetscCall(MatFactorInfoInitialize(&tinfo)); /* substitute default options */
3468: info = &tinfo;
3469: }
3471: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3472: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0)); /* trivial symbolic phase: attribute the time to the combined event */
3473: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3474: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3475: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3476: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view")); /* optionally display the factored matrix via the options database */
3477: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3478: PetscFunctionReturn(PETSC_SUCCESS);
3479: }
3481: /*@
3482: MatQRFactor - Performs in-place QR factorization of matrix.
3484: Collective
3486: Input Parameters:
3487: + mat - the matrix
3488: . col - column permutation
3489: - info - options for factorization, includes
3490: .vb
3491: fill - expected fill as ratio of original fill.
3492: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3493: Run with the option -info to determine an optimal value to use
3494: .ve
3496: Level: developer
3498: Notes:
3499: Most users should employ the `KSP` interface for linear solvers
3500: instead of working directly with matrix algebra routines such as this.
3501: See, e.g., `KSPCreate()`.
3503: This changes the state of the matrix to a factored matrix; it cannot be used
3504: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3506: Developer Note:
3507: The Fortran interface is not autogenerated as the
3508: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3510: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3511: `MatSetUnfactored()`
3512: @*/
3513: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3514: {
  MatFactorInfo tinfo; /* default factorization options, used only when the caller passes info == NULL */
3515: PetscFunctionBegin;
3518: if (info) PetscAssertPointer(info, 3); /* info is optional */
3520: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3521: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3522: MatCheckPreallocated(mat, 1);
  if (!info) { /* substitute default options instead of forwarding NULL, matching the other factorization entry points */
    PetscCall(MatFactorInfoInitialize(&tinfo));
    info = &tinfo;
  }
3523: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3524: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info)); /* composed-function dispatch: errors if the type provides no QR */
3525: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3526: PetscCall(PetscObjectStateIncrease((PetscObject)mat)); /* matrix contents changed in place */
3527: PetscFunctionReturn(PETSC_SUCCESS);
3528: }
3530: /*@
3531: MatQRFactorSymbolic - Performs symbolic QR factorization of matrix.
3532: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3534: Collective
3536: Input Parameters:
3537: + fact - the factor matrix obtained with `MatGetFactor()`
3538: . mat - the matrix
3539: . col - column permutation
3540: - info - options for factorization, includes
3541: .vb
3542: fill - expected fill as ratio of original fill.
3543: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3544: Run with the option -info to determine an optimal value to use
3545: .ve
3547: Level: developer
3549: Note:
3550: Most users should employ the `KSP` interface for linear solvers
3551: instead of working directly with matrix algebra routines such as this.
3552: See, e.g., `KSPCreate()`.
3554: Developer Note:
3555: The Fortran interface is not autogenerated as the
3556: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3558: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3559: @*/
3560: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3561: {
3562: MatFactorInfo tinfo; /* default factorization options, used only when info == NULL */
3564: PetscFunctionBegin;
3568: if (info) PetscAssertPointer(info, 4); /* info is optional */
3571: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3572: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3573: MatCheckPreallocated(mat, 2);
3574: if (!info) {
3575: PetscCall(MatFactorInfoInitialize(&tinfo)); /* substitute default options */
3576: info = &tinfo;
3577: }
3579: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0)); /* skip logging when the symbolic phase is trivial */
3580: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info)); /* composed-function dispatch: errors if unsupported */
3581: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3582: PetscCall(PetscObjectStateIncrease((PetscObject)fact)); /* fact (not mat) is modified here */
3583: PetscFunctionReturn(PETSC_SUCCESS);
3584: }
3586: /*@
3587: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3588: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3590: Collective
3592: Input Parameters:
3593: + fact - the factor matrix obtained with `MatGetFactor()`
3594: . mat - the matrix
3595: - info - options for factorization
3597: Level: developer
3599: Notes:
3600: See `MatQRFactor()` for in-place factorization.
3602: Most users should employ the `KSP` interface for linear solvers
3603: instead of working directly with matrix algebra routines such as this.
3604: See, e.g., `KSPCreate()`.
3606: Developer Note:
3607: The Fortran interface is not autogenerated as the
3608: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3610: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3611: @*/
3612: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3613: {
3614: MatFactorInfo tinfo; /* default factorization options, used only when info == NULL */
3616: PetscFunctionBegin;
3621: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3622: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3623: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N); /* fact must match mat's global dimensions (set up by the symbolic phase) */
3625: MatCheckPreallocated(mat, 2);
3626: if (!info) {
3627: PetscCall(MatFactorInfoInitialize(&tinfo)); /* substitute default options */
3628: info = &tinfo;
3629: }
3631: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3632: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0)); /* trivial symbolic phase: attribute the time to the combined QR event */
3633: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info)); /* composed-function dispatch: errors if unsupported */
3634: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3635: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3636: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view")); /* optionally display the factored matrix via the options database */
3637: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3638: PetscFunctionReturn(PETSC_SUCCESS);
3639: }
3641: /*@
3642: MatSolve - Solves $A x = b$, given a factored matrix.
3644: Neighbor-wise Collective
3646: Input Parameters:
3647: + mat - the factored matrix
3648: - b - the right-hand-side vector
3650: Output Parameter:
3651: . x - the result vector
3653: Level: developer
3655: Notes:
3656: The vectors `b` and `x` cannot be the same. I.e., one cannot
3657: call `MatSolve`(A,x,x).
3659: Most users should employ the `KSP` interface for linear solvers
3660: instead of working directly with matrix algebra routines such as this.
3661: See, e.g., `KSPCreate()`.
3663: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3664: @*/
3665: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3666: {
3667: PetscFunctionBegin;
3672: PetscCheckSameComm(mat, 1, b, 2);
3673: PetscCheckSameComm(mat, 1, x, 3);
3674: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors"); /* in-place solve is not supported */
3675: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3676: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3677: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n); /* local check: use a self communicator so one rank can report */
3678: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0x0 matrix: nothing to solve */
3679: MatCheckPreallocated(mat, 1);
3681: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3682: PetscCall(VecFlag(x, mat->factorerrortype)); /* marks x (see VecFlag()) when the factorization recorded an error */
3683: if (mat->factorerrortype) {
3684: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype)); /* factorization failed earlier: skip the solve, leave flagged x */
3685: } else PetscUseTypeMethod(mat, solve, b, x);
3686: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3687: PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x was (possibly) overwritten */
3688: PetscFunctionReturn(PETSC_SUCCESS);
3689: }
3691: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3692: {
3693: Vec b, x;
3694: PetscInt N, i;
3695: PetscErrorCode (*f)(Mat, Vec, Vec); /* per-column solve routine chosen below */
3696: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3698: PetscFunctionBegin;
3699: if (A->factorerrortype) {
3700: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3701: PetscCall(MatSetInf(X)); /* factorization failed earlier: flag X and bail out */
3702: PetscFunctionReturn(PETSC_SUCCESS);
3703: }
3704: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose; /* for transpose solves, fall back to the plain solve when it is missing and A is flagged symmetric */
3705: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3706: PetscCall(MatBoundToCPU(A, &Abound));
3707: if (!Abound) { /* A may run on a device: check whether B/X are host dense and need conversion */
3708: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3709: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3710: }
3711: #if PetscDefined(HAVE_CUDA)
3712: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B)); /* in-place conversion to device dense */
3713: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3714: #elif PetscDefined(HAVE_HIP)
3715: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3716: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3717: #endif
3718: PetscCall(MatGetSize(B, NULL, &N)); /* number of right-hand sides (columns of B) */
3719: for (i = 0; i < N; i++) { /* solve one column at a time: x_i = f(A, b_i) */
3720: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3721: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3722: PetscCall((*f)(A, b, x));
3723: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3724: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3725: }
3726: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B)); /* restore original host dense type */
3727: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3728: PetscFunctionReturn(PETSC_SUCCESS);
3729: }
3731: /*@
3732: MatMatSolve - Solves $A X = B$, given a factored matrix.
3734: Neighbor-wise Collective
3736: Input Parameters:
3737: + A - the factored matrix
3738: - B - the right-hand-side matrix `MATDENSE` (or sparse `MATAIJ`-- when using MUMPS)
3740: Output Parameter:
3741: . X - the result matrix (dense matrix)
3743: Level: developer
3745: Note:
3746: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3747: otherwise, `B` and `X` cannot be the same.
3749: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3750: @*/
3751: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3752: {
3753: PetscFunctionBegin;
3758: PetscCheckSameComm(A, 1, B, 2);
3759: PetscCheckSameComm(A, 1, X, 3);
3760: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3761: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3762: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix"); /* one solution column per right-hand-side column */
3763: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0x0 matrix: nothing to solve */
3764: MatCheckPreallocated(A, 1);
3766: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3767: if (!A->ops->matsolve) { /* no native multi-RHS solve: fall back to column-by-column */
3768: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3769: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3770: } else PetscUseTypeMethod(A, matsolve, B, X);
3771: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3772: PetscCall(PetscObjectStateIncrease((PetscObject)X)); /* X was overwritten */
3773: PetscFunctionReturn(PETSC_SUCCESS);
3774: }
3776: /*@
3777: MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.
3779: Neighbor-wise Collective
3781: Input Parameters:
3782: + A - the factored matrix
3783: - B - the right-hand-side matrix (`MATDENSE` matrix)
3785: Output Parameter:
3786: . X - the result matrix (dense matrix)
3788: Level: developer
3790: Note:
3791: The matrices `B` and `X` cannot be the same. I.e., one cannot
3792: call `MatMatSolveTranspose`(A,X,X).
3794: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3795: @*/
3796: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3797: {
3798: PetscFunctionBegin;
3803: PetscCheckSameComm(A, 1, B, 2);
3804: PetscCheckSameComm(A, 1, X, 3);
3805: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices"); /* in-place solve is not supported */
3806: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3807: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3808: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3809: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as rhs matrix"); /* message now matches the >= test above */
3810: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0x0 matrix: nothing to solve */
3811: MatCheckPreallocated(A, 1);
3813: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3814: if (!A->ops->matsolvetranspose) { /* no native multi-RHS transpose solve: fall back to column-by-column */
3815: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3816: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3817: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3818: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3819: PetscCall(PetscObjectStateIncrease((PetscObject)X)); /* X was overwritten */
3820: PetscFunctionReturn(PETSC_SUCCESS);
3821: }
3823: /*@
3824: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3826: Neighbor-wise Collective
3828: Input Parameters:
3829: + A - the factored matrix
3830: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3832: Output Parameter:
3833: . X - the result matrix (dense matrix)
3835: Level: developer
3837: Note:
3838: For MUMPS, it only supports centralized sparse compressed column format on the host processor for right-hand side matrix. User must create `Bt` in sparse compressed row
3839: format on the host processor and call `MatMatTransposeSolve()` to implement MUMPS' `MatMatSolve()`.
3841: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3842: @*/
3843: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3844: {
3845: PetscFunctionBegin;
3850: PetscCheckSameComm(A, 1, Bt, 2);
3851: PetscCheckSameComm(A, 1, X, 3);
3853: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices"); /* in-place solve is not supported */
3854: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3855: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N); /* Bt holds the rhs transposed: its columns are rhs rows */
3856: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the number of rows of the rhs matrix"); /* message now matches the >= test above */
3857: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0x0 matrix: nothing to solve */
3858: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix"); /* this entry point requires an already-factored A */
3859: MatCheckPreallocated(A, 1);
3861: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3862: PetscUseTypeMethod(A, mattransposesolve, Bt, X); /* no generic fallback: errors if the type lacks this operation */
3863: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3864: PetscCall(PetscObjectStateIncrease((PetscObject)X)); /* X was overwritten */
3865: PetscFunctionReturn(PETSC_SUCCESS);
3866: }
3868: /*@
3869: MatForwardSolve - Solves $ L x = b $, given a factored matrix, $A = LU $, or
3870: $U^T*D^(1/2) x = b$, given a factored symmetric matrix, $A = U^T*D*U$,
3872: Neighbor-wise Collective
3874: Input Parameters:
3875: + mat - the factored matrix
3876: - b - the right-hand-side vector
3878: Output Parameter:
3879: . x - the result vector
3881: Level: developer
3883: Notes:
3884: `MatSolve()` should be used for most applications, as it performs
3885: a forward solve followed by a backward solve.
3887: The vectors `b` and `x` cannot be the same, i.e., one cannot
3888: call `MatForwardSolve`(A,x,x).
3890: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3891: the diagonal blocks are not implemented as $D = D^(1/2) * D^(1/2)$ yet.
3892: `MatForwardSolve()` solves $U^T*D y = b$, and
3893: `MatBackwardSolve()` solves $U x = y$.
3894: Thus they do not provide a symmetric preconditioner.
3896: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3897: @*/
3898: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3899: {
3900: PetscFunctionBegin;
3905: PetscCheckSameComm(mat, 1, b, 2);
3906: PetscCheckSameComm(mat, 1, x, 3);
3907: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors"); /* in-place solve is not supported */
3908: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3909: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3910: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n); /* local check: use a self communicator so one rank can report */
3911: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0x0 matrix: nothing to solve */
3912: MatCheckPreallocated(mat, 1);
3914: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3915: PetscUseTypeMethod(mat, forwardsolve, b, x); /* lower-triangular (forward) sweep of the factored matrix */
3916: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3917: PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x was overwritten */
3918: PetscFunctionReturn(PETSC_SUCCESS);
3919: }
3921: /*@
3922: MatBackwardSolve - Solves $U x = b$, given a factored matrix, $A = LU$, or
3923: $D^(1/2) U x = b$, given a factored symmetric matrix, $A = U^T*D*U$,
3925: Neighbor-wise Collective
3927: Input Parameters:
3928: + mat - the factored matrix
3929: - b - the right-hand-side vector
3931: Output Parameter:
3932: . x - the result vector
3934: Level: developer
3936: Notes:
3937: `MatSolve()` should be used for most applications, as it performs
3938: a forward solve followed by a backward solve.
3940: The vectors `b` and `x` cannot be the same. I.e., one cannot
3941: call `MatBackwardSolve`(A,x,x).
3943: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3944: the diagonal blocks are not implemented as $D = D^(1/2) * D^(1/2)$ yet.
3945: `MatForwardSolve()` solves $U^T*D y = b$, and
3946: `MatBackwardSolve()` solves $U x = y$.
3947: Thus they do not provide a symmetric preconditioner.
3949: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3950: @*/
3951: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3952: {
3953: PetscFunctionBegin;
/* Same validation pattern as MatForwardSolve(): shared communicator,
   distinct x and b, conforming global and local dimensions */
3958: PetscCheckSameComm(mat, 1, b, 2);
3959: PetscCheckSameComm(mat, 1, x, 3);
3960: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3961: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3962: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3963: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
/* Empty matrix: nothing to do */
3964: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3965: MatCheckPreallocated(mat, 1);
/* Dispatch to the type-specific backwardsolve kernel, with profiling */
3967: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3968: PetscUseTypeMethod(mat, backwardsolve, b, x);
3969: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
/* Record that x changed */
3970: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3971: PetscFunctionReturn(PETSC_SUCCESS);
3972: }
3974: /*@
3975: MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.
3977: Neighbor-wise Collective
3979: Input Parameters:
3980: + mat - the factored matrix
3981: . b - the right-hand-side vector
3982: - y - the vector to be added to
3984: Output Parameter:
3985: . x - the result vector
3987: Level: developer
3989: Note:
3990: The vectors `b` and `x` cannot be the same. I.e., one cannot
3991: call `MatSolveAdd`(A,x,y,x).
3993: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3994: @*/
3995: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
3996: {
3997: PetscScalar one = 1.0;
3998: Vec tmp;
4000: PetscFunctionBegin;
/* Validation: shared communicator, x != b, and size conformance; note y may
   alias x (handled explicitly below) but b may not */
4006: PetscCheckSameComm(mat, 1, b, 2);
4007: PetscCheckSameComm(mat, 1, y, 3);
4008: PetscCheckSameComm(mat, 1, x, 4);
4009: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4010: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4011: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4012: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4013: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4014: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4015: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4016: MatCheckPreallocated(mat, 1);
4018: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
/* If a previous factorization failed, VecFlag() fills x with a flag value
   (e.g. infinities) instead of solving, and we only log the error */
4019: PetscCall(VecFlag(x, mat->factorerrortype));
4020: if (mat->factorerrortype) {
4021: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4022: } else if (mat->ops->solveadd) {
/* Preferred path: the type implements a fused solve-and-add */
4023: PetscUseTypeMethod(mat, solveadd, b, y, x);
4024: } else {
4025: /* do the solve then the add manually */
4026: if (x != y) {
4027: PetscCall(MatSolve(mat, b, x));
4028: PetscCall(VecAXPY(x, one, y));
4029: } else {
/* x aliases y: save the original y in a temporary before MatSolve overwrites it */
4030: PetscCall(VecDuplicate(x, &tmp));
4031: PetscCall(VecCopy(x, tmp));
4032: PetscCall(MatSolve(mat, b, x));
4033: PetscCall(VecAXPY(x, one, tmp));
4034: PetscCall(VecDestroy(&tmp));
4035: }
4036: }
4037: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4038: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4039: PetscFunctionReturn(PETSC_SUCCESS);
4040: }
4042: /*@
4043: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4045: Neighbor-wise Collective
4047: Input Parameters:
4048: + mat - the factored matrix
4049: - b - the right-hand-side vector
4051: Output Parameter:
4052: . x - the result vector
4054: Level: developer
4056: Notes:
4057: The vectors `b` and `x` cannot be the same. I.e., one cannot
4058: call `MatSolveTranspose`(A,x,x).
4060: Most users should employ the `KSP` interface for linear solvers
4061: instead of working directly with matrix algebra routines such as this.
4062: See, e.g., `KSPCreate()`.
4064: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4065: @*/
4066: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4067: {
/* If no transpose solve is available but the matrix is marked symmetric,
   fall back to the plain solve: A^T = A in that case */
4068: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4070: PetscFunctionBegin;
/* Validation: note rmap/cmap roles are swapped relative to MatSolve()
   because we solve with the transpose */
4075: PetscCheckSameComm(mat, 1, b, 2);
4076: PetscCheckSameComm(mat, 1, x, 3);
4077: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4078: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4079: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4080: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4081: MatCheckPreallocated(mat, 1);
4082: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
/* Broken factorization: flag x and skip the solve */
4083: PetscCall(VecFlag(x, mat->factorerrortype));
4084: if (mat->factorerrortype) {
4085: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4086: } else {
4087: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4088: PetscCall((*f)(mat, b, x));
4089: }
4090: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4091: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4092: PetscFunctionReturn(PETSC_SUCCESS);
4093: }
4095: /*@
4096: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4097: factored matrix.
4099: Neighbor-wise Collective
4101: Input Parameters:
4102: + mat - the factored matrix
4103: . b - the right-hand-side vector
4104: - y - the vector to be added to
4106: Output Parameter:
4107: . x - the result vector
4109: Level: developer
4111: Note:
4112: The vectors `b` and `x` cannot be the same. I.e., one cannot
4113: call `MatSolveTransposeAdd`(A,x,y,x).
4115: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4116: @*/
4117: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4118: {
4119: PetscScalar one = 1.0;
4120: Vec tmp;
/* Symmetric fallback, as in MatSolveTranspose(): when no transpose variant
   exists and the matrix is symmetric, the plain solveadd is equivalent */
4121: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4123: PetscFunctionBegin;
/* Validation: rmap/cmap swapped relative to MatSolveAdd() (transpose solve);
   y may alias x, b may not */
4129: PetscCheckSameComm(mat, 1, b, 2);
4130: PetscCheckSameComm(mat, 1, y, 3);
4131: PetscCheckSameComm(mat, 1, x, 4);
4132: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4133: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4134: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4135: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4136: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4137: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4138: MatCheckPreallocated(mat, 1);
4140: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
/* Broken factorization: flag x and only report */
4141: PetscCall(VecFlag(x, mat->factorerrortype));
4142: if (mat->factorerrortype) {
4143: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4144: } else if (f) {
4145: PetscCall((*f)(mat, b, y, x));
4146: } else {
4147: /* do the solve then the add manually */
4148: if (x != y) {
4149: PetscCall(MatSolveTranspose(mat, b, x));
4150: PetscCall(VecAXPY(x, one, y));
4151: } else {
/* x aliases y: preserve y's values in tmp across the solve */
4152: PetscCall(VecDuplicate(x, &tmp));
4153: PetscCall(VecCopy(x, tmp));
4154: PetscCall(MatSolveTranspose(mat, b, x));
4155: PetscCall(VecAXPY(x, one, tmp));
4156: PetscCall(VecDestroy(&tmp));
4157: }
4158: }
4159: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4160: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4161: PetscFunctionReturn(PETSC_SUCCESS);
4162: }
4164: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4165: /*@
4166: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4168: Neighbor-wise Collective
4170: Input Parameters:
4171: + mat - the matrix
4172: . b - the right-hand side
4173: . omega - the relaxation factor
4174: . flag - flag indicating the type of SOR (see below)
4175: . shift - diagonal shift
4176: . its - the number of iterations
4177: - lits - the number of local iterations
4179: Output Parameter:
4180: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4182: SOR Flags:
4183: + `SOR_FORWARD_SWEEP` - forward SOR
4184: . `SOR_BACKWARD_SWEEP` - backward SOR
4185: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4186: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4187: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4188: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4189: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4190: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4191: upper/lower triangular part of matrix to
4192: vector (with omega)
4193: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4195: Level: developer
4197: Notes:
4198: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4199: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4200: on each processor.
4202: Application programmers will not generally use `MatSOR()` directly,
4203: but instead will employ the `KSP`/`PC` interface.
4205: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing, otherwise it does a pointwise smoothing
4207: Most users should employ the `KSP` interface for linear solvers
4208: instead of working directly with matrix algebra routines such as this.
4209: See, e.g., `KSPCreate()`.
4211: Vectors `x` and `b` CANNOT be the same
4213: The flags are implemented as bitwise inclusive or operations.
4214: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4215: to specify a zero initial guess for SSOR.
4217: Developer Note:
4218: We should add block SOR support for `MATAIJ` matrices with block size set to greater than one and no inodes
4220: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4221: @*/
4222: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4223: {
4224: PetscFunctionBegin;
/* Validation: unfactored assembled matrix, conforming sizes, positive global
   and local iteration counts, and x strictly distinct from b */
4229: PetscCheckSameComm(mat, 1, b, 2);
4230: PetscCheckSameComm(mat, 1, x, 8);
4231: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4232: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4233: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4234: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4235: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4236: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4237: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4238: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4240: MatCheckPreallocated(mat, 1);
/* Dispatch to the type-specific SOR kernel; errors if not implemented */
4241: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4242: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4243: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4244: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4245: PetscFunctionReturn(PETSC_SUCCESS);
4246: }
4248: /*
4249: Default matrix copy routine.
4250: */
4251: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4252: {
4253: PetscInt i, rstart = 0, rend = 0, nz;
4254: const PetscInt *cwork;
4255: const PetscScalar *vwork;
4257: PetscFunctionBegin;
4258: if (B->assembled) PetscCall(MatZeroEntries(B));
4259: if (str == SAME_NONZERO_PATTERN) {
4260: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4261: for (i = rstart; i < rend; i++) {
4262: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4263: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4264: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4265: }
4266: } else {
4267: PetscCall(MatAYPX(B, 0.0, A, str));
4268: }
4269: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4270: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4271: PetscFunctionReturn(PETSC_SUCCESS);
4272: }
4274: /*@
4275: MatCopy - Copies a matrix to another matrix.
4277: Collective
4279: Input Parameters:
4280: + A - the matrix
4281: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4283: Output Parameter:
4284: . B - where the copy is put
4286: Level: intermediate
4288: Notes:
4289: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4291: `MatCopy()` copies the matrix entries of a matrix to another existing
4292: matrix (after first zeroing the second matrix). A related routine is
4293: `MatConvert()`, which first creates a new matrix and then copies the data.
4295: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4296: @*/
4297: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4298: {
4299: PetscInt i;
4301: PetscFunctionBegin;
/* Validation: same communicator, both matrices preallocated, A assembled,
   A not a factored matrix, and matching global dimensions */
4306: PetscCheckSameComm(A, 1, B, 2);
4307: MatCheckPreallocated(B, 2);
4308: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4309: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4310: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4311: A->cmap->N, B->cmap->N);
4312: MatCheckPreallocated(A, 1);
/* Copying a matrix onto itself is a no-op */
4313: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4315: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
/* Prefer the type-specific copy; otherwise fall back to the generic
   row-by-row MatCopy_Basic() */
4316: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4317: else PetscCall(MatCopy_Basic(A, B, str));
/* Propagate DMDA-style stencil metadata; the loop bound accounts for the
   optional extra "noc" (no-component) entry */
4319: B->stencil.dim = A->stencil.dim;
4320: B->stencil.noc = A->stencil.noc;
4321: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4322: B->stencil.dims[i] = A->stencil.dims[i];
4323: B->stencil.starts[i] = A->stencil.starts[i];
4324: }
4326: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4327: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4328: PetscFunctionReturn(PETSC_SUCCESS);
4329: }
4331: /*@
4332: MatConvert - Converts a matrix to another matrix, either of the same
4333: or different type.
4335: Collective
4337: Input Parameters:
4338: + mat - the matrix
4339: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4340: same type as the original matrix.
4341: - reuse - denotes if the destination matrix is to be created or reused.
4342: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input mat to be changed to contain the matrix in the new format), otherwise use
4343: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4345: Output Parameter:
4346: . M - pointer to place new matrix
4348: Level: intermediate
4350: Notes:
4351: `MatConvert()` first creates a new matrix and then copies the data from
4352: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4353: entries of one matrix to another already existing matrix context.
4355: Cannot be used to convert a sequential matrix to parallel or parallel to sequential,
4356: the MPI communicator of the generated matrix is always the same as the communicator
4357: of the input matrix.
4359: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4360: @*/
4361: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4362: {
4363: PetscBool sametype, issame, flg;
4364: PetscBool3 issymmetric, ishermitian;
4365: char convname[256], mtype[256];
4366: Mat B;
4368: PetscFunctionBegin;
4371: PetscAssertPointer(M, 4);
4372: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4373: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4374: MatCheckPreallocated(mat, 1);
/* Allow the target type to be overridden from the options database */
4376: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4377: if (flg) newtype = mtype;
4379: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4380: PetscCall(PetscStrcmp(newtype, "same", &issame));
4381: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4382: if (reuse == MAT_REUSE_MATRIX) {
4384: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4385: }
/* In-place conversion to the same type is a no-op */
4387: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4388: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4389: PetscFunctionReturn(PETSC_SUCCESS);
4390: }
4392: /* Cache Mat options because some converters use MatHeaderReplace */
4393: issymmetric = mat->symmetric;
4394: ishermitian = mat->hermitian;
/* Same-type conversion to a fresh matrix is simply a duplicate */
4396: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4397: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4398: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4399: } else {
4400: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4401: const char *prefix[3] = {"seq", "mpi", ""};
4402: PetscInt i;
4403: /*
4404: Order of precedence:
4405: 0) See if newtype is a superclass of the current matrix.
4406: 1) See if a specialized converter is known to the current matrix.
4407: 2) See if a specialized converter is known to the desired matrix class.
4408: 3) See if a good general converter is registered for the desired class
4409: (as of 6/27/03 only MATMPIADJ falls into this category).
4410: 4) See if a good general converter is known for the current matrix.
4411: 5) Use a really basic converter.
4412: */
4414: /* 0) See if newtype is a superclass of the current matrix.
4415: i.e mat is mpiaij and newtype is aij */
4416: for (i = 0; i < 2; i++) {
4417: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4418: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4419: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4420: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4421: if (flg) {
4422: if (reuse == MAT_INPLACE_MATRIX) {
4423: PetscCall(PetscInfo(mat, "Early return\n"));
4424: PetscFunctionReturn(PETSC_SUCCESS);
4425: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4426: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4427: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4428: PetscFunctionReturn(PETSC_SUCCESS);
4429: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4430: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4431: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4432: PetscFunctionReturn(PETSC_SUCCESS);
4433: }
4434: }
4435: }
4436: /* 1) See if a specialized converter is known to the current matrix and the desired class */
/* Converter names look like MatConvert_<fromtype>_<seq|mpi|><totype>_C and
   are composed on the source matrix object */
4437: for (i = 0; i < 3; i++) {
4438: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4439: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4440: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4441: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4442: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4443: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4444: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4445: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4446: if (conv) goto foundconv;
4447: }
4449: /* 2) See if a specialized converter is known to the desired matrix class. */
/* A throwaway matrix B of the target type is created only to query the
   converters composed with that type; it is destroyed before converting */
4450: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4451: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4452: PetscCall(MatSetType(B, newtype));
4453: for (i = 0; i < 3; i++) {
4454: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4455: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4456: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4457: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4458: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4459: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4460: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4461: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4462: if (conv) {
4463: PetscCall(MatDestroy(&B));
4464: goto foundconv;
4465: }
4466: }
4468: /* 3) See if a good general converter is registered for the desired class */
4469: conv = B->ops->convertfrom;
4470: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4471: PetscCall(MatDestroy(&B));
4472: if (conv) goto foundconv;
4474: /* 4) See if a good general converter is known for the current matrix */
4475: if (mat->ops->convert) conv = mat->ops->convert;
4476: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4477: if (conv) goto foundconv;
4479: /* 5) Use a really basic converter. */
4480: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4481: conv = MatConvert_Basic;
4483: foundconv:
4484: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4485: PetscCall((*conv)(mat, newtype, reuse, M));
4486: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4487: /* the block sizes must be same if the mappings are copied over */
4488: (*M)->rmap->bs = mat->rmap->bs;
4489: (*M)->cmap->bs = mat->cmap->bs;
4490: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4491: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4492: (*M)->rmap->mapping = mat->rmap->mapping;
4493: (*M)->cmap->mapping = mat->cmap->mapping;
4494: }
/* Propagate DMDA-style stencil metadata to the converted matrix */
4495: (*M)->stencil.dim = mat->stencil.dim;
4496: (*M)->stencil.noc = mat->stencil.noc;
4497: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4498: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4499: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4500: }
4501: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4502: }
4503: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4505: /* Copy Mat options */
/* Restore the symmetry/hermitian flags cached above; PETSC_BOOL3_UNKNOWN is
   deliberately left untouched */
4506: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4507: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4508: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4509: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4510: PetscFunctionReturn(PETSC_SUCCESS);
4511: }
4513: /*@
4514: MatFactorGetSolverType - Returns name of the package providing the factorization routines
4516: Not Collective
4518: Input Parameter:
4519: . mat - the matrix, must be a factored matrix
4521: Output Parameter:
4522: . type - the string name of the package (do not free this string)
4524: Level: intermediate
4526: Fortran Note:
4527: Pass in an empty string that is long enough and the package name will be copied into it.
4529: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4530: @*/
4531: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4532: {
4533: PetscErrorCode (*conv)(Mat, MatSolverType *);
4535: PetscFunctionBegin;
4538: PetscAssertPointer(type, 2);
4539: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
/* Ask the matrix for a composed query function; if the solver package did
   not compose one, fall back to the built-in PETSc solver name */
4540: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4541: if (conv) PetscCall((*conv)(mat, type));
4542: else *type = MATSOLVERPETSC;
4543: PetscFunctionReturn(PETSC_SUCCESS);
4544: }
/* Registry of factorization providers: a linked list of solver packages
   (MatSolverTypeHolder), each holding a linked list of per-matrix-type
   handler nodes (MatSolverTypeForSpecifcType) mapping a MatType to one
   factory routine per MatFactorType.
   NOTE: the "Specifc" misspelling is historical and kept for source
   compatibility. */
4546: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4547: struct _MatSolverTypeForSpecifcType {
4548: MatType mtype;
4549: /* no entry for MAT_FACTOR_NONE */
4550: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4551: MatSolverTypeForSpecifcType next; /* next matrix type handled by the same package */
4552: };
4554: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4555: struct _MatSolverTypeHolder {
4556: char *name; /* package name, e.g. "petsc", "superlu" */
4557: MatSolverTypeForSpecifcType handlers; /* matrix types this package can factor */
4558: MatSolverTypeHolder next; /* next registered package */
4559: };
/* Head of the global registry; populated by MatSolverTypeRegister() */
4561: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4563: /*@C
4564: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4566: Logically Collective, No Fortran Support
4568: Input Parameters:
4569: + package - name of the package, for example petsc or superlu
4570: . mtype - the matrix type that works with this package
4571: . ftype - the type of factorization supported by the package
4572: - createfactor - routine that will create the factored matrix ready to be used
4574: Level: developer
4576: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4577: `MatGetFactor()`
4578: @*/
4579: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4580: {
4581: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4582: PetscBool flg;
4583: MatSolverTypeForSpecifcType inext, iprev = NULL;
4585: PetscFunctionBegin;
4586: PetscCall(MatInitializePackage());
4587: if (!next) {
4588: PetscCall(PetscNew(&MatSolverTypeHolders));
4589: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4590: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4591: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4592: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4593: PetscFunctionReturn(PETSC_SUCCESS);
4594: }
4595: while (next) {
4596: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4597: if (flg) {
4598: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4599: inext = next->handlers;
4600: while (inext) {
4601: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4602: if (flg) {
4603: inext->createfactor[(int)ftype - 1] = createfactor;
4604: PetscFunctionReturn(PETSC_SUCCESS);
4605: }
4606: iprev = inext;
4607: inext = inext->next;
4608: }
4609: PetscCall(PetscNew(&iprev->next));
4610: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4611: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4612: PetscFunctionReturn(PETSC_SUCCESS);
4613: }
4614: prev = next;
4615: next = next->next;
4616: }
4617: PetscCall(PetscNew(&prev->next));
4618: PetscCall(PetscStrallocpy(package, &prev->next->name));
4619: PetscCall(PetscNew(&prev->next->handlers));
4620: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4621: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4622: PetscFunctionReturn(PETSC_SUCCESS);
4623: }
4625: /*@C
4626: MatSolverTypeGet - Gets the function that creates the factor matrix if it exist
4628: Input Parameters:
4629: + type - name of the package, for example petsc or superlu, if this is 'NULL', then the first result that satisfies the other criteria is returned
4630: . ftype - the type of factorization supported by the type
4631: - mtype - the matrix type that works with this type
4633: Output Parameters:
4634: + foundtype - `PETSC_TRUE` if the type was registered
4635: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4636: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4638: Calling sequence of `createfactor`:
4639: + A - the matrix providing the factor matrix
4640: . ftype - the `MatFactorType` of the factor requested
4641: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4643: Level: developer
4645: Note:
4646: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4647: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4648: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4650: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4651: `MatInitializePackage()`
4652: @*/
PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
{
  MatSolverTypeHolder next = MatSolverTypeHolders;
  PetscBool flg;
  MatSolverTypeForSpecifcType inext;

  PetscFunctionBegin;
  /* default every output to "not found" so the early returns below are always safe */
  if (foundtype) *foundtype = PETSC_FALSE;
  if (foundmtype) *foundmtype = PETSC_FALSE;
  if (createfactor) *createfactor = NULL;

  if (type) {
    /* a specific solver package was requested: locate it by case-insensitive name */
    while (next) {
      PetscCall(PetscStrcasecmp(type, next->name, &flg));
      if (flg) {
        if (foundtype) *foundtype = PETSC_TRUE;
        inext = next->handlers;
        /* search the package's handlers; a prefix match on mtype also accepts base classes */
        while (inext) {
          PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
          if (flg) {
            if (foundmtype) *foundmtype = PETSC_TRUE;
            /* factor tables are indexed by ftype - 1 (MAT_FACTOR_NONE is not stored) */
            if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
            PetscFunctionReturn(PETSC_SUCCESS);
          }
          inext = inext->next;
        }
      }
      next = next->next;
    }
  } else {
    /* no package requested: first pass requires an exact mtype match with a registered factory */
    while (next) {
      inext = next->handlers;
      while (inext) {
        PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
        if (flg && inext->createfactor[(int)ftype - 1]) {
          if (foundtype) *foundtype = PETSC_TRUE;
          if (foundmtype) *foundmtype = PETSC_TRUE;
          if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
          PetscFunctionReturn(PETSC_SUCCESS);
        }
        inext = inext->next;
      }
      next = next->next;
    }
    /* try with base classes inext->mtype */
    next = MatSolverTypeHolders;
    while (next) {
      inext = next->handlers;
      while (inext) {
        PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
        if (flg && inext->createfactor[(int)ftype - 1]) {
          if (foundtype) *foundtype = PETSC_TRUE;
          if (foundmtype) *foundmtype = PETSC_TRUE;
          if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
          PetscFunctionReturn(PETSC_SUCCESS);
        }
        inext = inext->next;
      }
      next = next->next;
    }
  }
  /* nothing matched: all outputs keep their "not found" defaults */
  PetscFunctionReturn(PETSC_SUCCESS);
}
4717: PetscErrorCode MatSolverTypeDestroy(void)
4718: {
4719: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4720: MatSolverTypeForSpecifcType inext, iprev;
4722: PetscFunctionBegin;
4723: while (next) {
4724: PetscCall(PetscFree(next->name));
4725: inext = next->handlers;
4726: while (inext) {
4727: PetscCall(PetscFree(inext->mtype));
4728: iprev = inext;
4729: inext = inext->next;
4730: PetscCall(PetscFree(iprev));
4731: }
4732: prev = next;
4733: next = next->next;
4734: PetscCall(PetscFree(prev));
4735: }
4736: MatSolverTypeHolders = NULL;
4737: PetscFunctionReturn(PETSC_SUCCESS);
4738: }
4740: /*@
4741: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4743: Logically Collective
4745: Input Parameter:
4746: . mat - the matrix
4748: Output Parameter:
4749: . flg - `PETSC_TRUE` if uses the ordering
4751: Level: developer
4753: Note:
4754: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4755: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4757: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4758: @*/
4759: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4760: {
4761: PetscFunctionBegin;
4762: *flg = mat->canuseordering;
4763: PetscFunctionReturn(PETSC_SUCCESS);
4764: }
4766: /*@
4767: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4769: Logically Collective
4771: Input Parameters:
4772: + mat - the matrix obtained with `MatGetFactor()`
4773: - ftype - the factorization type to be used
4775: Output Parameter:
4776: . otype - the preferred ordering type
4778: Level: developer
4780: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4781: @*/
4782: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4783: {
4784: PetscFunctionBegin;
4785: *otype = mat->preferredordering[ftype];
4786: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4787: PetscFunctionReturn(PETSC_SUCCESS);
4788: }
4790: /*@
4791: MatGetFactor - Returns a matrix suitable to calls to MatXXFactorSymbolic,Numeric()
4793: Collective
4795: Input Parameters:
4796: + mat - the matrix
4797: . type - name of solver type, for example, superlu, petsc (to use PETSc's solver if it is available), if this is 'NULL', then the first result that satisfies
4798: the other criteria is returned
4799: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4801: Output Parameter:
4802: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4804: Options Database Keys:
4805: + -pc_factor_mat_solver_type <type> - choose the type at run time. When using `KSP` solvers
4806: - -mat_factor_bind_factorization <host, device> - Where to do matrix factorization? Default is device (might consume more device memory.
4807: One can choose host to save device memory). Currently only supported with `MATSEQAIJCUSPARSE` matrices.
4809: Level: intermediate
4811: Notes:
4812: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4813: types registered with `MatSolverTypeRegister()` cannot be fully tested if not at runtime.
4815: Users usually access the factorization solvers via `KSP`
4817: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4818: such as pastix, superlu, mumps etc. PETSc must have been ./configure to use the external solver, using the option --download-package or --with-package-dir
4820: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4821: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4822: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4824: Some of the packages have options for controlling the factorization, these are in the form -prefix_mat_packagename_packageoption
4825: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly one can set
4826: call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4828: Developer Note:
4829: This should actually be called `MatCreateFactor()` since it creates a new factor object
4831: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4832: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`
4833: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4834: @*/
PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
{
  PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
  PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);

  PetscFunctionBegin;
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* shell matrices may supply their own factorization through MATOP_GET_FACTOR; if so, use it directly */
  PetscCall(MatIsShell(mat, &shell));
  if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
  if (hasop) {
    PetscUseTypeMethod(mat, getfactor, type, ftype, f);
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* otherwise search the registered solver packages for a factory matching (type, mtype, ftype) */
  PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
  if (!foundtype) {
    if (type) {
      SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
              ((PetscObject)mat)->type_name, type);
    } else {
      SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
    }
  }
  PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
  PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);

  /* create the (empty) factor matrix and give it the caller-requested options prefix, if any */
  PetscCall((*conv)(mat, ftype, f));
  if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
4871: /*@
4872: MatGetFactorAvailable - Returns a flag if matrix supports particular type and factor type
4874: Not Collective
4876: Input Parameters:
4877: + mat - the matrix
4878: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4879: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4881: Output Parameter:
4882: . flg - PETSC_TRUE if the factorization is available
4884: Level: intermediate
4886: Notes:
4887: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4888: such as pastix, superlu, mumps etc.
4890: PETSc must have been ./configure to use the external solver, using the option --download-package
4892: Developer Note:
4893: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4895: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4896: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4897: @*/
4898: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4899: {
4900: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4902: PetscFunctionBegin;
4904: PetscAssertPointer(flg, 4);
4906: *flg = PETSC_FALSE;
4907: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
4909: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4910: MatCheckPreallocated(mat, 1);
4912: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4913: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4914: PetscFunctionReturn(PETSC_SUCCESS);
4915: }
4917: /*@
4918: MatDuplicate - Duplicates a matrix including the non-zero structure.
4920: Collective
4922: Input Parameters:
4923: + mat - the matrix
4924: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4925: See the manual page for `MatDuplicateOption()` for an explanation of these options.
4927: Output Parameter:
4928: . M - pointer to place new matrix
4930: Level: intermediate
4932: Notes:
4933: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
4935: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
4937: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4939: When original mat is a product of matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4940: is duplicated and the internal data structures created for the reuse of previous matrix operations are not duplicated.
4941: User should not use `MatDuplicate()` to create new matrix `M` if `M` is intended to be reused as the product of matrix operation.
4943: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4944: @*/
PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
{
  Mat B;
  VecType vtype;
  PetscInt i;
  PetscObject dm, container_h, container_d;
  void (*viewf)(void);

  PetscFunctionBegin;
  PetscAssertPointer(M, 3);
  PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  *M = NULL;
  /* the type-specific duplicate does the actual copy; it is logged under the MAT_Convert event */
  PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, duplicate, op, M);
  PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
  B = *M;

  /* carry over a custom viewer (if one was installed) and the vector type */
  PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
  if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
  PetscCall(MatGetVecType(mat, &vtype));
  PetscCall(MatSetVecType(B, vtype));

  /* copy the stencil description; one extra entry is copied when noc is set */
  B->stencil.dim = mat->stencil.dim;
  B->stencil.noc = mat->stencil.noc;
  for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
    B->stencil.dims[i] = mat->stencil.dims[i];
    B->stencil.starts[i] = mat->stencil.starts[i];
  }

  B->nooffproczerorows = mat->nooffproczerorows;
  B->nooffprocentries = mat->nooffprocentries;

  /* propagate composed objects: the attached DM and any COO assembly structures */
  PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
  if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
  PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
  if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
  PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
  if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
  /* symmetry flags only remain valid when the numerical values were copied */
  if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
  PetscCall(PetscObjectStateIncrease((PetscObject)B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
4993: /*@
4994: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
4996: Logically Collective
4998: Input Parameter:
4999: . mat - the matrix
5001: Output Parameter:
5002: . v - the diagonal of the matrix
5004: Level: intermediate
5006: Note:
5007: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
5008: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
5009: is larger than `ndiag`, the values of the remaining entries are unspecified.
5011: Currently only correct in parallel for square matrices.
5013: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
5014: @*/
5015: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
5016: {
5017: PetscFunctionBegin;
5021: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5022: MatCheckPreallocated(mat, 1);
5023: if (PetscDefined(USE_DEBUG)) {
5024: PetscInt nv, row, col, ndiag;
5026: PetscCall(VecGetLocalSize(v, &nv));
5027: PetscCall(MatGetLocalSize(mat, &row, &col));
5028: ndiag = PetscMin(row, col);
5029: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5030: }
5032: PetscUseTypeMethod(mat, getdiagonal, v);
5033: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5034: PetscFunctionReturn(PETSC_SUCCESS);
5035: }
5037: /*@
5038: MatGetRowMin - Gets the minimum value (of the real part) of each
5039: row of the matrix
5041: Logically Collective
5043: Input Parameter:
5044: . mat - the matrix
5046: Output Parameters:
+ v - the vector for storing the minimums
5048: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5050: Level: intermediate
5052: Note:
5053: The result of this call are the same as if one converted the matrix to dense format
5054: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5056: This code is only implemented for a couple of matrix formats.
5058: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5059: `MatGetRowMax()`
5060: @*/
5061: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5062: {
5063: PetscFunctionBegin;
5067: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5069: if (!mat->cmap->N) {
5070: PetscCall(VecSet(v, PETSC_MAX_REAL));
5071: if (idx) {
5072: PetscInt i, m = mat->rmap->n;
5073: for (i = 0; i < m; i++) idx[i] = -1;
5074: }
5075: } else {
5076: MatCheckPreallocated(mat, 1);
5077: }
5078: PetscUseTypeMethod(mat, getrowmin, v, idx);
5079: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5080: PetscFunctionReturn(PETSC_SUCCESS);
5081: }
5083: /*@
5084: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5085: row of the matrix
5087: Logically Collective
5089: Input Parameter:
5090: . mat - the matrix
5092: Output Parameters:
5093: + v - the vector for storing the minimums
5094: - idx - the indices of the column found for each row (or `NULL` if not needed)
5096: Level: intermediate
5098: Notes:
5099: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5100: row is 0 (the first column).
5102: This code is only implemented for a couple of matrix formats.
5104: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5105: @*/
5106: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5107: {
5108: PetscFunctionBegin;
5112: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5113: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5115: if (!mat->cmap->N) {
5116: PetscCall(VecSet(v, 0.0));
5117: if (idx) {
5118: PetscInt i, m = mat->rmap->n;
5119: for (i = 0; i < m; i++) idx[i] = -1;
5120: }
5121: } else {
5122: MatCheckPreallocated(mat, 1);
5123: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5124: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5125: }
5126: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5127: PetscFunctionReturn(PETSC_SUCCESS);
5128: }
5130: /*@
5131: MatGetRowMax - Gets the maximum value (of the real part) of each
5132: row of the matrix
5134: Logically Collective
5136: Input Parameter:
5137: . mat - the matrix
5139: Output Parameters:
5140: + v - the vector for storing the maximums
5141: - idx - the indices of the column found for each row (optional, otherwise pass `NULL`)
5143: Level: intermediate
5145: Notes:
  The results of this call are the same as if one converted the matrix to dense format
  and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
5149: This code is only implemented for a couple of matrix formats.
5151: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5152: @*/
5153: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5154: {
5155: PetscFunctionBegin;
5159: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5161: if (!mat->cmap->N) {
5162: PetscCall(VecSet(v, PETSC_MIN_REAL));
5163: if (idx) {
5164: PetscInt i, m = mat->rmap->n;
5165: for (i = 0; i < m; i++) idx[i] = -1;
5166: }
5167: } else {
5168: MatCheckPreallocated(mat, 1);
5169: PetscUseTypeMethod(mat, getrowmax, v, idx);
5170: }
5171: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5172: PetscFunctionReturn(PETSC_SUCCESS);
5173: }
5175: /*@
5176: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5177: row of the matrix
5179: Logically Collective
5181: Input Parameter:
5182: . mat - the matrix
5184: Output Parameters:
5185: + v - the vector for storing the maximums
5186: - idx - the indices of the column found for each row (or `NULL` if not needed)
5188: Level: intermediate
5190: Notes:
5191: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5192: row is 0 (the first column).
5194: This code is only implemented for a couple of matrix formats.
5196: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5197: @*/
5198: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5199: {
5200: PetscFunctionBegin;
5204: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5206: if (!mat->cmap->N) {
5207: PetscCall(VecSet(v, 0.0));
5208: if (idx) {
5209: PetscInt i, m = mat->rmap->n;
5210: for (i = 0; i < m; i++) idx[i] = -1;
5211: }
5212: } else {
5213: MatCheckPreallocated(mat, 1);
5214: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5215: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5216: }
5217: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5218: PetscFunctionReturn(PETSC_SUCCESS);
5219: }
5221: /*@
5222: MatGetRowSumAbs - Gets the sum value (in absolute value) of each row of the matrix
5224: Logically Collective
5226: Input Parameter:
5227: . mat - the matrix
5229: Output Parameter:
5230: . v - the vector for storing the sum
5232: Level: intermediate
  Note:
  This code is only implemented for a couple of matrix formats.
5236: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5237: @*/
5238: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5239: {
5240: PetscFunctionBegin;
5244: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5246: if (!mat->cmap->N) {
5247: PetscCall(VecSet(v, 0.0));
5248: } else {
5249: MatCheckPreallocated(mat, 1);
5250: PetscUseTypeMethod(mat, getrowsumabs, v);
5251: }
5252: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5253: PetscFunctionReturn(PETSC_SUCCESS);
5254: }
5256: /*@
5257: MatGetRowSum - Gets the sum of each row of the matrix
5259: Logically or Neighborhood Collective
5261: Input Parameter:
5262: . mat - the matrix
5264: Output Parameter:
5265: . v - the vector for storing the sum of rows
5267: Level: intermediate
5269: Note:
5270: This code is slow since it is not currently specialized for different formats
5272: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5273: @*/
5274: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5275: {
5276: Vec ones;
5278: PetscFunctionBegin;
5282: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5283: MatCheckPreallocated(mat, 1);
5284: PetscCall(MatCreateVecs(mat, &ones, NULL));
5285: PetscCall(VecSet(ones, 1.));
5286: PetscCall(MatMult(mat, ones, v));
5287: PetscCall(VecDestroy(&ones));
5288: PetscFunctionReturn(PETSC_SUCCESS);
5289: }
5291: /*@
5292: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5293: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5295: Collective
5297: Input Parameter:
5298: . mat - the matrix to provide the transpose
5300: Output Parameter:
5301: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5303: Level: advanced
5305: Note:
5306: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5307: routine allows bypassing that call.
5309: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5310: @*/
5311: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5312: {
5313: PetscContainer rB = NULL;
5314: MatParentState *rb = NULL;
5316: PetscFunctionBegin;
5317: PetscCall(PetscNew(&rb));
5318: rb->id = ((PetscObject)mat)->id;
5319: rb->state = 0;
5320: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5321: PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)B), &rB));
5322: PetscCall(PetscContainerSetPointer(rB, rb));
5323: PetscCall(PetscContainerSetUserDestroy(rB, PetscContainerUserDestroyDefault));
5324: PetscCall(PetscObjectCompose((PetscObject)B, "MatTransposeParent", (PetscObject)rB));
5325: PetscCall(PetscObjectDereference((PetscObject)rB));
5326: PetscFunctionReturn(PETSC_SUCCESS);
5327: }
5329: /*@
5330: MatTranspose - Computes an in-place or out-of-place transpose of a matrix.
5332: Collective
5334: Input Parameters:
5335: + mat - the matrix to transpose
5336: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5338: Output Parameter:
5339: . B - the transpose
5341: Level: intermediate
5343: Notes:
5344: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5346: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX`. If you already have a matrix to contain the
5347: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5349: If the nonzero structure of mat changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5351: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose, but don't need the storage to be changed.
5353: If mat is unchanged from the last call this function returns immediately without recomputing the result
5355: If you only need the symbolic transpose, and not the numerical values, use `MatTransposeSymbolic()`
5357: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5358: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5359: @*/
PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
{
  PetscContainer rB = NULL;
  MatParentState *rb = NULL;

  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
  PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
  MatCheckPreallocated(mat, 1);
  if (reuse == MAT_REUSE_MATRIX) {
    /* B must carry the "MatTransposeParent" marker (set by MatTranspose() or MatTransposeSetPrecursor()) and it must refer to this mat */
    PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
    PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
    PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
    PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
    /* mat is unchanged since B was last computed: nothing to recompute */
    if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
  /* an in-place transpose of a known-symmetric matrix is a no-op */
  if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
    PetscUseTypeMethod(mat, transpose, reuse, B);
    PetscCall(PetscObjectStateIncrease((PetscObject)*B));
  }
  PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));

  if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
  if (reuse != MAT_INPLACE_MATRIX) {
    /* record mat's current state on B so a future MAT_REUSE_MATRIX call can short-circuit above */
    PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
    PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
    rb->state = ((PetscObject)mat)->state;
    rb->nonzerostate = mat->nonzerostate;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
5398: /*@
5399: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5401: Collective
5403: Input Parameter:
5404: . A - the matrix to transpose
5406: Output Parameter:
5407: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5408: numerical portion.
5410: Level: intermediate
5412: Note:
5413: This is not supported for many matrix types, use `MatTranspose()` in those cases
5415: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5416: @*/
PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* errors if the matrix type does not implement transposesymbolic; use MatTranspose() for those types */
  PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
  PetscUseTypeMethod(A, transposesymbolic, B);
  PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));

  /* mark A as the precursor so MatTranspose(A, MAT_REUSE_MATRIX, &B) can later fill in the numerical values */
  PetscCall(MatTransposeSetPrecursor(A, *B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
  Checks that B was produced from A by MatTranspose()/MatTransposeSymbolic() (via the
  "MatTransposeParent" container attached by MatTransposeSetPrecursor()) and that A's
  nonzero structure has not changed since, so B is safe to reuse as the transpose.
*/
PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
{
  PetscContainer rB;
  MatParentState *rb;

  PetscFunctionBegin;
  PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* the container exists only if B was generated by a precursor-aware MatTranspose() call */
  PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
  PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
  PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
  /* B must descend from THIS matrix, and A's nonzero pattern must be unchanged since then */
  PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
  PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
  PetscFunctionReturn(PETSC_SUCCESS);
}
5450: /*@
5451: MatIsTranspose - Test whether a matrix is another one's transpose,
5452: or its own, in which case it tests symmetry.
5454: Collective
5456: Input Parameters:
5457: + A - the matrix to test
5458: . B - the matrix to test against, this can equal the first parameter
5459: - tol - tolerance, differences between entries smaller than this are counted as zero
5461: Output Parameter:
5462: . flg - the result
5464: Level: intermediate
5466: Notes:
5467: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5468: test involves parallel copies of the block off-diagonal parts of the matrix.
5470: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5471: @*/
PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
{
  /* f and g are the type-specific comparators registered under "MatIsTranspose_C" on A and B */
  PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);

  PetscFunctionBegin;
  PetscAssertPointer(flg, 4);
  PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
  PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
  *flg = PETSC_FALSE; /* keep the result defined on every path */
  if (f && g) {
    /* both matrices must agree on the comparison routine for the result to be meaningful */
    PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
    PetscCall((*f)(A, B, tol, flg));
  } else {
    MatType mattype;

    /* report the offending type: if f exists the missing comparator is B's, otherwise A's */
    PetscCall(MatGetType(f ? B : A, &mattype));
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
5495: /*@
5496: MatHermitianTranspose - Computes an in-place or out-of-place Hermitian transpose of a matrix in complex conjugate.
5498: Collective
5500: Input Parameters:
5501: + mat - the matrix to transpose and complex conjugate
5502: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5504: Output Parameter:
5505: . B - the Hermitian transpose
5507: Level: intermediate
5509: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5510: @*/
PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
{
  PetscFunctionBegin;
  /* transpose first; in complex builds conjugate the result in place to get A^H
     (in real builds the transpose and the Hermitian transpose coincide) */
  PetscCall(MatTranspose(mat, reuse, B));
#if defined(PETSC_USE_COMPLEX)
  PetscCall(MatConjugate(*B));
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
5521: /*@
  MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose.
5524: Collective
5526: Input Parameters:
5527: + A - the matrix to test
5528: . B - the matrix to test against, this can equal the first parameter
5529: - tol - tolerance, differences between entries smaller than this are counted as zero
5531: Output Parameter:
5532: . flg - the result
5534: Level: intermediate
5536: Notes:
5537: Only available for `MATAIJ` matrices.
5539: The sequential algorithm
5540: has a running time of the order of the number of nonzeros; the parallel
5541: test involves parallel copies of the block off-diagonal parts of the matrix.
5543: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5544: @*/
5545: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5546: {
5547: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5549: PetscFunctionBegin;
5552: PetscAssertPointer(flg, 4);
5553: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5554: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5555: if (f && g) {
5556: PetscCheck(f != g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5557: PetscCall((*f)(A, B, tol, flg));
5558: }
5559: PetscFunctionReturn(PETSC_SUCCESS);
5560: }
5562: /*@
5563: MatPermute - Creates a new matrix with rows and columns permuted from the
5564: original.
5566: Collective
5568: Input Parameters:
5569: + mat - the matrix to permute
5570: . row - row permutation, each processor supplies only the permutation for its rows
5571: - col - column permutation, each processor supplies only the permutation for its columns
5573: Output Parameter:
5574: . B - the permuted matrix
5576: Level: advanced
5578: Note:
5579: The index sets map from row/col of permuted matrix to row/col of original matrix.
5580: The index sets should be on the same communicator as mat and have the same local sizes.
5582: Developer Note:
5583: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5584: exploit the fact that row and col are permutations, consider implementing the
5585: more general `MatCreateSubMatrix()` instead.
5587: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5588: @*/
PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
{
  PetscFunctionBegin;
  PetscAssertPointer(B, 4);
  PetscCheckSameComm(mat, 1, row, 2);
  if (row != col) PetscCheckSameComm(row, 2, col, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat, 1);

  if (mat->ops->permute) {
    /* the matrix type provides a native permutation routine */
    PetscUseTypeMethod(mat, permute, row, col, B);
    PetscCall(PetscObjectStateIncrease((PetscObject)*B));
  } else {
    /* fall back to submatrix extraction, which is valid because row/col are permutations */
    PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
5613: /*@
5614: MatEqual - Compares two matrices.
5616: Collective
5618: Input Parameters:
5619: + A - the first matrix
5620: - B - the second matrix
5622: Output Parameter:
5623: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5625: Level: intermediate
5627: .seealso: [](ch_matrices), `Mat`
5628: @*/
5629: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5630: {
5631: PetscFunctionBegin;
5636: PetscAssertPointer(flg, 3);
5637: PetscCheckSameComm(A, 1, B, 2);
5638: MatCheckPreallocated(A, 1);
5639: MatCheckPreallocated(B, 2);
5640: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5641: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5642: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5643: B->cmap->N);
5644: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5645: PetscUseTypeMethod(A, equal, B, flg);
5646: } else {
5647: PetscCall(MatMultEqual(A, B, 10, flg));
5648: }
5649: PetscFunctionReturn(PETSC_SUCCESS);
5650: }
5652: /*@
5653: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5654: matrices that are stored as vectors. Either of the two scaling
5655: matrices can be `NULL`.
5657: Collective
5659: Input Parameters:
5660: + mat - the matrix to be scaled
5661: . l - the left scaling vector (or `NULL`)
5662: - r - the right scaling vector (or `NULL`)
5664: Level: intermediate
5666: Note:
5667: `MatDiagonalScale()` computes $A = LAR$, where
5668: L = a diagonal matrix (stored as a vector), R = a diagonal matrix (stored as a vector)
5669: The L scales the rows of the matrix, the R scales the columns of the matrix.
5671: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5672: @*/
PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
{
  PetscFunctionBegin;
  /* either scaling vector may be NULL; validate only the ones provided */
  if (l) {
    PetscCheckSameComm(mat, 1, l, 2);
  }
  if (r) {
    PetscCheckSameComm(mat, 1, r, 3);
  }
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS); /* nothing to scale */

  PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, diagonalscale, l, r);
  PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  /* scaling with distinct left and right factors generally destroys symmetry */
  if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
5699: /*@
5700: MatScale - Scales all elements of a matrix by a given number.
5702: Logically Collective
5704: Input Parameters:
5705: + mat - the matrix to be scaled
5706: - a - the scaling value
5708: Level: intermediate
5710: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5711: @*/
5712: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5713: {
5714: PetscFunctionBegin;
5717: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5718: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5720: MatCheckPreallocated(mat, 1);
5722: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5723: if (a != (PetscScalar)1.0) {
5724: PetscUseTypeMethod(mat, scale, a);
5725: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5726: }
5727: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5728: PetscFunctionReturn(PETSC_SUCCESS);
5729: }
5731: /*@
5732: MatNorm - Calculates various norms of a matrix.
5734: Collective
5736: Input Parameters:
5737: + mat - the matrix
5738: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5740: Output Parameter:
5741: . nrm - the resulting norm
5743: Level: intermediate
5745: .seealso: [](ch_matrices), `Mat`
5746: @*/
PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
{
  PetscFunctionBegin;
  PetscAssertPointer(nrm, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* delegate to the type-specific norm; errors if the type does not implement it */
  PetscUseTypeMethod(mat, norm, type, nrm);
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
  Guards against double-counting: MatAssemblyBegin() calls made from inside a
  MatAssemblyEnd() are not logged. Nonzero while any MatAssemblyEnd() is active;
  see the corresponding checks in MatAssemblyBegin() and MatAssemblyEnd().
*/
static PetscInt MatAssemblyEnd_InUse = 0;
5767: /*@
5768: MatAssemblyBegin - Begins assembling the matrix. This routine should
5769: be called after completing all calls to `MatSetValues()`.
5771: Collective
5773: Input Parameters:
5774: + mat - the matrix
5775: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5777: Level: beginner
5779: Notes:
5780: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5781: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5783: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5784: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5785: using the matrix.
5787: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5788: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5789: a global collective operation requiring all processes that share the matrix.
5791: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine are compressed
5792: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5793: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
5795: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5796: @*/
PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
{
  PetscFunctionBegin;
  MatCheckPreallocated(mat, 1);
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
  if (mat->assembled) {
    /* re-assembly: remember it was assembled before and mark it unassembled while values move */
    mat->was_assembled = PETSC_TRUE;
    mat->assembled = PETSC_FALSE;
  }

  if (!MatAssemblyEnd_InUse) {
    PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
    PetscTryTypeMethod(mat, assemblybegin, type);
    PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
  } else PetscTryTypeMethod(mat, assemblybegin, type); /* nested inside MatAssemblyEnd(): skip logging */
  PetscFunctionReturn(PETSC_SUCCESS);
}
5817: /*@
5818: MatAssembled - Indicates if a matrix has been assembled and is ready for
5819: use; for example, in matrix-vector product.
5821: Not Collective
5823: Input Parameter:
5824: . mat - the matrix
5826: Output Parameter:
5827: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5829: Level: advanced
5831: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5832: @*/
PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
{
  PetscFunctionBegin;
  PetscAssertPointer(assembled, 2);
  /* report the cached assembly flag; no work or communication is performed */
  *assembled = mat->assembled;
  PetscFunctionReturn(PETSC_SUCCESS);
}
5842: /*@
5843: MatAssemblyEnd - Completes assembling the matrix. This routine should
5844: be called after `MatAssemblyBegin()`.
5846: Collective
5848: Input Parameters:
5849: + mat - the matrix
5850: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5852: Options Database Keys:
5853: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5854: . -mat_view ::ascii_info_detail - Prints more detailed info
5855: . -mat_view - Prints matrix in ASCII format
5856: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5857: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5858: . -display <name> - Sets display name (default is host)
5859: . -draw_pause <sec> - Sets number of seconds to pause after display
5860: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5861: . -viewer_socket_machine <machine> - Machine to use for socket
5862: . -viewer_socket_port <port> - Port number to use for socket
5863: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5865: Level: beginner
5867: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5868: @*/
PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
{
  static PetscInt inassm = 0; /* recursion depth of MatAssemblyEnd() itself */
  PetscBool flg = PETSC_FALSE;

  PetscFunctionBegin;
  inassm++;
  MatAssemblyEnd_InUse++;
  if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
    PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
    PetscTryTypeMethod(mat, assemblyend, type);
    PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
  } else PetscTryTypeMethod(mat, assemblyend, type);

  /* Flush assembly is not a true assembly */
  if (type != MAT_FLUSH_ASSEMBLY) {
    if (mat->num_ass) {
      /* values may have changed, so cached properties become unknown unless declared eternal */
      if (!mat->symmetry_eternal) {
        mat->symmetric = PETSC_BOOL3_UNKNOWN;
        mat->hermitian = PETSC_BOOL3_UNKNOWN;
      }
      /* structural symmetry survives unless the nonzero pattern itself changed */
      if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
      if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
    }
    mat->num_ass++;
    mat->assembled = PETSC_TRUE;
    mat->ass_nonzerostate = mat->nonzerostate;
  }

  mat->insertmode = NOT_SET_VALUES;
  MatAssemblyEnd_InUse--;
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  /* viewing and diagnostic checks only at the outermost, final assembly */
  if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
    PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));

    if (mat->checksymmetryonassembly) {
      PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
      if (flg) {
        PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
      } else {
        PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
      }
    }
    if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
  }
  inassm--;
  PetscFunctionReturn(PETSC_SUCCESS);
}
5921: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5922: /*@
5923: MatSetOption - Sets a parameter option for a matrix. Some options
5924: may be specific to certain storage formats. Some options
5925: determine how values will be inserted (or added). Sorted,
5926: row-oriented input will generally assemble the fastest. The default
5927: is row-oriented.
5929: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5931: Input Parameters:
5932: + mat - the matrix
5933: . op - the option, one of those listed below (and possibly others),
5934: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5936: Options Describing Matrix Structure:
5937: + `MAT_SPD` - symmetric positive definite
5938: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5939: . `MAT_HERMITIAN` - transpose is the complex conjugation
5940: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5941: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5942: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5943: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5945: These are not really options of the matrix, they are knowledge about the structure of the matrix that users may provide so that they
5946: do not need to be computed (usually at a high cost)
5948: Options For Use with `MatSetValues()`:
5949: Insert a logically dense subblock, which can be
5950: . `MAT_ROW_ORIENTED` - row-oriented (default)
5952: These options reflect the data you pass in with `MatSetValues()`; it has
5953: nothing to do with how the data is stored internally in the matrix
5954: data structure.
5956: When (re)assembling a matrix, we can restrict the input for
5957: efficiency/debugging purposes. These options include
5958: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5959: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5960: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5961: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5962: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5963: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5964: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5965: performance for very large process counts.
5966: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5967: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5968: functions, instead sending only neighbor messages.
5970: Level: intermediate
5972: Notes:
5973: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
5975: Some options are relevant only for particular matrix types and
5976: are thus ignored by others. Other options are not supported by
5977: certain matrix types and will generate an error message if set.
5979: If using Fortran to compute a matrix, one may need to
5980: use the column-oriented option (or convert to the row-oriented
5981: format).
5983: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
5984: that would generate a new entry in the nonzero structure is instead
5985: ignored. Thus, if memory has not already been allocated for this particular
5986: data, then the insertion is ignored. For dense matrices, in which
5987: the entire array is allocated, no entries are ever ignored.
5988: Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
5990: `MAT_NEW_NONZERO_LOCATION_ERR` set to PETSC_TRUE indicates that any add or insertion
5991: that would generate a new entry in the nonzero structure instead produces
5992: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
5994: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5995: that would generate a new entry that has not been preallocated will
5996: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
5997: only.) This is a useful flag when debugging matrix memory preallocation.
5998: If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
6000: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
6001: other processors should be dropped, rather than stashed.
6002: This is useful if you know that the "owning" processor is also
6003: always generating the correct matrix entries, so that PETSc need
6004: not transfer duplicate entries generated on another processor.
6006: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6007: searches during matrix assembly. When this flag is set, the hash table
6008: is created during the first matrix assembly. This hash table is
6009: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6010: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
6011: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
6012: supported by `MATMPIBAIJ` format only.
6014: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6015: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6017: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6018: a zero location in the matrix
6020: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6022: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6023: zero row routines and thus improves performance for very large process counts.
6025: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6026: part of the matrix (since they should match the upper triangular part).
6028: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6029: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6030: with finite difference schemes with non-periodic boundary conditions.
6032: Developer Note:
6033: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6034: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6035: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6036: not changed.
6038: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6039: @*/
PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
{
  PetscFunctionBegin;
  if (op > 0) {
  }
  PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);

  switch (op) {
  /* the first group of options returns immediately: they are pure flag stores that
     must not reach the type-specific setoption method below */
  case MAT_FORCE_DIAGONAL_ENTRIES:
    mat->force_diagonals = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_NO_OFF_PROC_ENTRIES:
    mat->nooffprocentries = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_SUBSET_OFF_PROC_ENTRIES:
    mat->assembly_subset = flg;
    if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
#if !defined(PETSC_HAVE_MPIUNI)
      PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
#endif
      mat->stash.first_assembly_done = PETSC_FALSE;
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_NO_OFF_PROC_ZERO_ROWS:
    mat->nooffproczerorows = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_SPD:
    /* SPD implies symmetric (hence structurally symmetric); being non-SPD implies nothing extra */
    if (flg) {
      mat->spd = PETSC_BOOL3_TRUE;
      mat->symmetric = PETSC_BOOL3_TRUE;
      mat->structurally_symmetric = PETSC_BOOL3_TRUE;
    } else {
      mat->spd = PETSC_BOOL3_FALSE;
    }
    break;
  case MAT_SYMMETRIC:
    mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
    if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
#if !defined(PETSC_USE_COMPLEX)
    /* over the reals, symmetric and Hermitian coincide */
    mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
#endif
    break;
  case MAT_HERMITIAN:
    mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
    if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
#if !defined(PETSC_USE_COMPLEX)
    mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
#endif
    break;
  case MAT_STRUCTURALLY_SYMMETRIC:
    mat->structurally_symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
    break;
  /* the *_ETERNAL options are only meaningful once the underlying property is known */
  case MAT_SYMMETRY_ETERNAL:
    PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
    mat->symmetry_eternal = flg;
    if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
    break;
  case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
    PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
    mat->structural_symmetry_eternal = flg;
    break;
  case MAT_SPD_ETERNAL:
    PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
    mat->spd_eternal = flg;
    if (flg) {
      mat->structural_symmetry_eternal = PETSC_TRUE;
      mat->symmetry_eternal = PETSC_TRUE;
    }
    break;
  case MAT_STRUCTURE_ONLY:
    mat->structure_only = flg;
    break;
  case MAT_SORTED_FULL:
    mat->sortedfull = flg;
    break;
  default:
    break;
  }
  /* give the matrix type a chance to act on the option as well (no-op if unimplemented) */
  PetscTryTypeMethod(mat, setoption, op, flg);
  PetscFunctionReturn(PETSC_SUCCESS);
}
6126: /*@
6127: MatGetOption - Gets a parameter option that has been set for a matrix.
6129: Logically Collective
6131: Input Parameters:
6132: + mat - the matrix
6133: - op - the option, this only responds to certain options, check the code for which ones
6135: Output Parameter:
6136: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6138: Level: intermediate
6140: Notes:
6141: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6143: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6144: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6146: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6147: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6148: @*/
6149: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6150: {
6151: PetscFunctionBegin;
6155: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6156: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6158: switch (op) {
6159: case MAT_NO_OFF_PROC_ENTRIES:
6160: *flg = mat->nooffprocentries;
6161: break;
6162: case MAT_NO_OFF_PROC_ZERO_ROWS:
6163: *flg = mat->nooffproczerorows;
6164: break;
6165: case MAT_SYMMETRIC:
6166: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6167: break;
6168: case MAT_HERMITIAN:
6169: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6170: break;
6171: case MAT_STRUCTURALLY_SYMMETRIC:
6172: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6173: break;
6174: case MAT_SPD:
6175: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6176: break;
6177: case MAT_SYMMETRY_ETERNAL:
6178: *flg = mat->symmetry_eternal;
6179: break;
6180: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6181: *flg = mat->symmetry_eternal;
6182: break;
6183: default:
6184: break;
6185: }
6186: PetscFunctionReturn(PETSC_SUCCESS);
6187: }
6189: /*@
6190: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6191: this routine retains the old nonzero structure.
6193: Logically Collective
6195: Input Parameter:
6196: . mat - the matrix
6198: Level: intermediate
6200: Note:
6201: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6202: See the Performance chapter of the users manual for information on preallocating matrices.
6204: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6205: @*/
PetscErrorCode MatZeroEntries(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* pending unassembled values would be silently lost; insist on a clean insert state */
  PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, zeroentries);
  PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6222: /*@
6223: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6224: of a set of rows and columns of a matrix.
6226: Collective
6228: Input Parameters:
6229: + mat - the matrix
6230: . numRows - the number of rows/columns to zero
6231: . rows - the global row indices
6232: . diag - value put in the diagonal of the eliminated rows
6233: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6234: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6236: Level: intermediate
6238: Notes:
6239: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6241: For each zeroed row, the value of the corresponding `b` is set to diag times the value of the corresponding `x`.
6242: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6244: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6245: Krylov method to take advantage of the known solution on the zeroed rows.
6247: For the parallel case, all processes that share the matrix (i.e.,
6248: those in the communicator used for matrix creation) MUST call this
6249: routine, regardless of whether any rows being zeroed are owned by
6250: them.
6252: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`, it merely zeros those entries in the matrix, but never
6253: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6254: missing.
6256: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6257: list only rows local to itself).
6259: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
6261: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6262: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6263: @*/
PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  if (numRows) PetscAssertPointer(rows, 3); /* rows may be NULL only when no rows are zeroed */
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
  PetscCall(MatViewFromOptions(mat, NULL, "-mat_view")); /* allow the user to view the modified matrix */
  /* values (and possibly the nonzero pattern, if a diagonal was inserted) changed */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6280: /*@
6281: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6282: of a set of rows and columns of a matrix.
6284: Collective
6286: Input Parameters:
6287: + mat - the matrix
6288: . is - the rows to zero
6289: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6290: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6291: - b - optional vector of right-hand side, that will be adjusted by provided solution
6293: Level: intermediate
6295: Note:
6296: See `MatZeroRowsColumns()` for details on how this routine operates.
6298: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6299: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6300: @*/
6301: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6302: {
6303: PetscInt numRows;
6304: const PetscInt *rows;
6306: PetscFunctionBegin;
6311: PetscCall(ISGetLocalSize(is, &numRows));
6312: PetscCall(ISGetIndices(is, &rows));
6313: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6314: PetscCall(ISRestoreIndices(is, &rows));
6315: PetscFunctionReturn(PETSC_SUCCESS);
6316: }
6318: /*@
6319: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6320: of a set of rows of a matrix.
6322: Collective
6324: Input Parameters:
6325: + mat - the matrix
6326: . numRows - the number of rows to zero
6327: . rows - the global row indices
6328: . diag - value put in the diagonal of the zeroed rows
6329: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6330: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6332: Level: intermediate
6334: Notes:
6335: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6337: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6339: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6340: Krylov method to take advantage of the known solution on the zeroed rows.
   May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns)
6343: from the matrix.
6345: Unlike `MatZeroRowsColumns()` for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure, from the eliminated rows of the matrix
6346: but does not release memory. Because of this removal matrix-vector products with the adjusted matrix will be a bit faster. For the dense and block diagonal
6347: formats this does not alter the nonzero structure.
6349: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) the nonzero structure
6350: of the matrix is not changed the values are
6351: merely zeroed.
6353: The user can set a value in the diagonal entry (or for the `MATAIJ` format
6354: formats can optionally remove the main diagonal entry from the
6355: nonzero structure as well, by passing 0.0 as the final argument).
6357: For the parallel case, all processes that share the matrix (i.e.,
6358: those in the communicator used for matrix creation) MUST call this
6359: routine, regardless of whether any rows being zeroed are owned by
6360: them.
6362: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6363: list only rows local to itself).
6365: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6366: owns that are to be zeroed. This saves a global synchronization in the implementation.
6368: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6369: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6370: @*/
PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  if (numRows) PetscAssertPointer(rows, 3); /* rows may be NULL only when no rows are zeroed */
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
  PetscCall(MatViewFromOptions(mat, NULL, "-mat_view")); /* allow the user to view the modified matrix */
  /* the matrix values (and possibly the nonzero pattern) changed */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6387: /*@
6388: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6389: of a set of rows of a matrix.
6391: Collective
6393: Input Parameters:
6394: + mat - the matrix
6395: . is - index set of rows to remove (if `NULL` then no row is removed)
6396: . diag - value put in all diagonals of eliminated rows
6397: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6398: - b - optional vector of right-hand side, that will be adjusted by provided solution
6400: Level: intermediate
6402: Note:
6403: See `MatZeroRows()` for details on how this routine operates.
6405: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6406: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6407: @*/
6408: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6409: {
6410: PetscInt numRows = 0;
6411: const PetscInt *rows = NULL;
6413: PetscFunctionBegin;
6416: if (is) {
6418: PetscCall(ISGetLocalSize(is, &numRows));
6419: PetscCall(ISGetIndices(is, &rows));
6420: }
6421: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6422: if (is) PetscCall(ISRestoreIndices(is, &rows));
6423: PetscFunctionReturn(PETSC_SUCCESS);
6424: }
6426: /*@
6427: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6428: of a set of rows of a matrix. These rows must be local to the process.
6430: Collective
6432: Input Parameters:
6433: + mat - the matrix
6434: . numRows - the number of rows to remove
6435: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6436: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6437: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6438: - b - optional vector of right-hand side, that will be adjusted by provided solution
6440: Level: intermediate
6442: Notes:
6443: See `MatZeroRows()` for details on how this routine operates.
6445: The grid coordinates are across the entire grid, not just the local portion
6447: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6448: obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
6449: etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
6450: `DM_BOUNDARY_PERIODIC` boundary type.
6452: For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
6453: a single value per point) you can skip filling those indices.
6455: Fortran Note:
6456: `idxm` and `idxn` should be declared as
6457: $ MatStencil idxm(4, m)
6458: and the values inserted using
6459: .vb
6460: idxm(MatStencil_i, 1) = i
6461: idxm(MatStencil_j, 1) = j
6462: idxm(MatStencil_k, 1) = k
6463: idxm(MatStencil_c, 1) = c
6464: etc
6465: .ve
6467: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6468: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6469: @*/
PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
{
  /* Translate (k,j,i,c) grid coordinates into local row numbers, then defer to MatZeroRowsLocal() */
  PetscInt  dim    = mat->stencil.dim;
  PetscInt  sdim   = dim - (1 - (PetscInt)mat->stencil.noc); /* spatial dimensions actually used */
  PetscInt *dims   = mat->stencil.dims + 1;
  PetscInt *starts = mat->stencil.starts;
  PetscInt *dxm    = (PetscInt *)rows; /* walk the MatStencil array as raw integers */
  PetscInt *jdxm, i, j, tmp, numNewRows = 0;

  PetscFunctionBegin;
  if (numRows) PetscAssertPointer(rows, 3);

  PetscCall(PetscMalloc1(numRows, &jdxm));
  for (i = 0; i < numRows; ++i) {
    /* Skip unused dimensions (they are ordered k, j, i, c) */
    for (j = 0; j < 3 - sdim; ++j) dxm++;
    /* Local index in X dir */
    tmp = *dxm++ - starts[0];
    /* Loop over remaining dimensions, row-major accumulation: tmp = tmp*dims[j] + offset */
    for (j = 0; j < dim - 1; ++j) {
      /* If nonlocal, set index to be negative */
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
      /* Update local index */
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
    }
    /* Skip component slot if necessary */
    if (mat->stencil.noc) dxm++;
    /* Local row number; rows not local to this process (tmp negative) are silently dropped */
    if (tmp >= 0) jdxm[numNewRows++] = tmp;
  }
  PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
  PetscCall(PetscFree(jdxm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6507: /*@
6508: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6509: of a set of rows and columns of a matrix.
6511: Collective
6513: Input Parameters:
6514: + mat - the matrix
6515: . numRows - the number of rows/columns to remove
6516: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6517: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6518: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6519: - b - optional vector of right-hand side, that will be adjusted by provided solution
6521: Level: intermediate
6523: Notes:
6524: See `MatZeroRowsColumns()` for details on how this routine operates.
6526: The grid coordinates are across the entire grid, not just the local portion
6528: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6529: obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
6530: etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
6531: `DM_BOUNDARY_PERIODIC` boundary type.
6533: For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
6534: a single value per point) you can skip filling those indices.
6536: Fortran Note:
6537: `idxm` and `idxn` should be declared as
6538: $ MatStencil idxm(4, m)
6539: and the values inserted using
6540: .vb
6541: idxm(MatStencil_i, 1) = i
6542: idxm(MatStencil_j, 1) = j
6543: idxm(MatStencil_k, 1) = k
6544: idxm(MatStencil_c, 1) = c
6545: etc
6546: .ve
6548: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6549: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6550: @*/
PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
{
  /* Translate (k,j,i,c) grid coordinates into local row numbers, then defer to MatZeroRowsColumnsLocal() */
  PetscInt  dim    = mat->stencil.dim;
  PetscInt  sdim   = dim - (1 - (PetscInt)mat->stencil.noc); /* spatial dimensions actually used */
  PetscInt *dims   = mat->stencil.dims + 1;
  PetscInt *starts = mat->stencil.starts;
  PetscInt *dxm    = (PetscInt *)rows; /* walk the MatStencil array as raw integers */
  PetscInt *jdxm, i, j, tmp, numNewRows = 0;

  PetscFunctionBegin;
  if (numRows) PetscAssertPointer(rows, 3);

  PetscCall(PetscMalloc1(numRows, &jdxm));
  for (i = 0; i < numRows; ++i) {
    /* Skip unused dimensions (they are ordered k, j, i, c) */
    for (j = 0; j < 3 - sdim; ++j) dxm++;
    /* Local index in X dir */
    tmp = *dxm++ - starts[0];
    /* Loop over remaining dimensions, row-major accumulation: tmp = tmp*dims[j] + offset */
    for (j = 0; j < dim - 1; ++j) {
      /* If nonlocal, set index to be negative */
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
      /* Update local index */
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
    }
    /* Skip component slot if necessary */
    if (mat->stencil.noc) dxm++;
    /* Local row number; rows not local to this process (tmp negative) are silently dropped */
    if (tmp >= 0) jdxm[numNewRows++] = tmp;
  }
  PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
  PetscCall(PetscFree(jdxm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6588: /*@
6589: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6590: of a set of rows of a matrix; using local numbering of rows.
6592: Collective
6594: Input Parameters:
6595: + mat - the matrix
6596: . numRows - the number of rows to remove
6597: . rows - the local row indices
6598: . diag - value put in all diagonals of eliminated rows
6599: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6600: - b - optional vector of right-hand side, that will be adjusted by provided solution
6602: Level: intermediate
6604: Notes:
6605: Before calling `MatZeroRowsLocal()`, the user must first set the
  local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6608: See `MatZeroRows()` for details on how this routine operates.
6610: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6611: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6612: @*/
PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  if (mat->ops->zerorowslocal) {
    /* the matrix type provides a native implementation taking local indices */
    PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
  } else {
    /* fallback: map the local indices to global ones, then use the global zerorows method */
    IS is, newis;
    const PetscInt *newRows;

    PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
    PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
    PetscCall(ISGetIndices(newis, &newRows));
    PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
    PetscCall(ISRestoreIndices(newis, &newRows));
    PetscCall(ISDestroy(&newis));
    PetscCall(ISDestroy(&is));
  }
  /* the matrix values changed; bump the object state so cached data is invalidated */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6642: /*@
6643: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6644: of a set of rows of a matrix; using local numbering of rows.
6646: Collective
6648: Input Parameters:
6649: + mat - the matrix
6650: . is - index set of rows to remove
6651: . diag - value put in all diagonals of eliminated rows
6652: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6653: - b - optional vector of right-hand side, that will be adjusted by provided solution
6655: Level: intermediate
6657: Notes:
6658: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6659: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6661: See `MatZeroRows()` for details on how this routine operates.
6663: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6664: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6665: @*/
6666: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6667: {
6668: PetscInt numRows;
6669: const PetscInt *rows;
6671: PetscFunctionBegin;
6675: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6676: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6677: MatCheckPreallocated(mat, 1);
6679: PetscCall(ISGetLocalSize(is, &numRows));
6680: PetscCall(ISGetIndices(is, &rows));
6681: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6682: PetscCall(ISRestoreIndices(is, &rows));
6683: PetscFunctionReturn(PETSC_SUCCESS);
6684: }
6686: /*@
6687: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6688: of a set of rows and columns of a matrix; using local numbering of rows.
6690: Collective
6692: Input Parameters:
6693: + mat - the matrix
6694: . numRows - the number of rows to remove
6695: . rows - the global row indices
6696: . diag - value put in all diagonals of eliminated rows
6697: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6698: - b - optional vector of right-hand side, that will be adjusted by provided solution
6700: Level: intermediate
6702: Notes:
6703: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6704: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6706: See `MatZeroRowsColumns()` for details on how this routine operates.
6708: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6709: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6710: @*/
PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  IS is, newis;
  const PetscInt *newRows;

  PetscFunctionBegin;
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* map local indices to global using the column map's local-to-global mapping,
     then use the global zerorowscolumns method (no local-index fast path here) */
  PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
  PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
  PetscCall(ISGetIndices(newis, &newRows));
  PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
  PetscCall(ISRestoreIndices(newis, &newRows));
  PetscCall(ISDestroy(&newis));
  PetscCall(ISDestroy(&is));
  /* the matrix values changed; bump the object state so cached data is invalidated */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6736: /*@
6737: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6738: of a set of rows and columns of a matrix; using local numbering of rows.
6740: Collective
6742: Input Parameters:
6743: + mat - the matrix
6744: . is - index set of rows to remove
6745: . diag - value put in all diagonals of eliminated rows
6746: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6747: - b - optional vector of right-hand side, that will be adjusted by provided solution
6749: Level: intermediate
6751: Notes:
6752: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6753: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6755: See `MatZeroRowsColumns()` for details on how this routine operates.
6757: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6758: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6759: @*/
6760: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6761: {
6762: PetscInt numRows;
6763: const PetscInt *rows;
6765: PetscFunctionBegin;
6769: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6770: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6771: MatCheckPreallocated(mat, 1);
6773: PetscCall(ISGetLocalSize(is, &numRows));
6774: PetscCall(ISGetIndices(is, &rows));
6775: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6776: PetscCall(ISRestoreIndices(is, &rows));
6777: PetscFunctionReturn(PETSC_SUCCESS);
6778: }
6780: /*@
6781: MatGetSize - Returns the numbers of rows and columns in a matrix.
6783: Not Collective
6785: Input Parameter:
6786: . mat - the matrix
6788: Output Parameters:
6789: + m - the number of global rows
6790: - n - the number of global columns
6792: Level: beginner
6794: Note:
6795: Both output parameters can be `NULL` on input.
6797: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6798: @*/
6799: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6800: {
6801: PetscFunctionBegin;
6803: if (m) *m = mat->rmap->N;
6804: if (n) *n = mat->cmap->N;
6805: PetscFunctionReturn(PETSC_SUCCESS);
6806: }
6808: /*@
6809: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, Returns the number of local rows and local columns
6810: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6812: Not Collective
6814: Input Parameter:
6815: . mat - the matrix
6817: Output Parameters:
6818: + m - the number of local rows, use `NULL` to not obtain this value
6819: - n - the number of local columns, use `NULL` to not obtain this value
6821: Level: beginner
6823: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6824: @*/
PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
{
  PetscFunctionBegin;
  if (m) PetscAssertPointer(m, 2);
  if (n) PetscAssertPointer(n, 3);
  if (m) *m = mat->rmap->n; /* local rows from the row layout */
  if (n) *n = mat->cmap->n; /* local columns from the column layout */
  PetscFunctionReturn(PETSC_SUCCESS);
}
6836: /*@
6837: MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with rows of a
6838: vector one multiplies this matrix by that are owned by this processor.
6840: Not Collective, unless matrix has not been allocated, then collective
6842: Input Parameter:
6843: . mat - the matrix
6845: Output Parameters:
6846: + m - the global index of the first local column, use `NULL` to not obtain this value
6847: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6849: Level: developer
6851: Notes:
6852: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6854: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6855: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6857: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6858: the local values in the matrix.
6860: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6861: Layouts](sec_matlayout) for details on matrix layouts.
6863: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6864: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6865: @*/
PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
{
  PetscFunctionBegin;
  if (m) PetscAssertPointer(m, 2);
  if (n) PetscAssertPointer(n, 3);
  MatCheckPreallocated(mat, 1); /* layout ranges are only valid after preallocation */
  if (m) *m = mat->cmap->rstart; /* first locally owned column */
  if (n) *n = mat->cmap->rend;   /* one past the last locally owned column */
  PetscFunctionReturn(PETSC_SUCCESS);
}
6879: /*@
6880: MatGetOwnershipRange - For matrices that own values by row, excludes `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6881: this MPI process.
6883: Not Collective
6885: Input Parameter:
6886: . mat - the matrix
6888: Output Parameters:
6889: + m - the global index of the first local row, use `NULL` to not obtain this value
6890: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6892: Level: beginner
6894: Notes:
6895: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6897: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6898: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6900: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6901: the local values in the matrix.
6903: The high argument is one more than the last element stored locally.
6905: For all matrices it returns the range of matrix rows associated with rows of a vector that
6906: would contain the result of a matrix vector product with this matrix. See [Matrix
6907: Layouts](sec_matlayout) for details on matrix layouts.
6909: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
6910: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6911: @*/
PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
{
  PetscFunctionBegin;
  if (m) PetscAssertPointer(m, 2);
  if (n) PetscAssertPointer(n, 3);
  MatCheckPreallocated(mat, 1); /* layout ranges are only valid after preallocation */
  if (m) *m = mat->rmap->rstart; /* first locally owned row */
  if (n) *n = mat->rmap->rend;   /* one past the last locally owned row */
  PetscFunctionReturn(PETSC_SUCCESS);
}
6925: /*@C
6926: MatGetOwnershipRanges - For matrices that own values by row, excludes `MATELEMENTAL` and
6927: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6929: Not Collective, unless matrix has not been allocated
6931: Input Parameter:
6932: . mat - the matrix
6934: Output Parameter:
6935: . ranges - start of each processors portion plus one more than the total length at the end, of length `size` + 1
6936: where `size` is the number of MPI processes used by `mat`
6938: Level: beginner
6940: Notes:
6941: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6943: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6944: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6946: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6947: the local values in the matrix.
6949: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6950: would contain the result of a matrix vector product with this matrix. See [Matrix
6951: Layouts](sec_matlayout) for details on matrix layouts.
6953: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6954: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
6955: `DMDAGetGhostCorners()`, `DM`
6956: @*/
PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
{
  PetscFunctionBegin;
  MatCheckPreallocated(mat, 1);
  /* The row layout caches the per-process ranges; expose its internal array directly */
  PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6967: /*@C
6968: MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with rows of a
6969: vector one multiplies this vector by that are owned by each processor.
6971: Not Collective, unless matrix has not been allocated
6973: Input Parameter:
6974: . mat - the matrix
6976: Output Parameter:
6977: . ranges - start of each processors portion plus one more than the total length at the end
6979: Level: beginner
6981: Notes:
6982: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6984: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6985: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6987: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6988: the local values in the matrix.
6990: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
6991: Layouts](sec_matlayout) for details on matrix layouts.
6993: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
6994: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
6995: `DMDAGetGhostCorners()`, `DM`
6996: @*/
PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
{
  PetscFunctionBegin;
  MatCheckPreallocated(mat, 1);
  /* Same as MatGetOwnershipRanges() but for the column layout (cmap) */
  PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7007: /*@
MatGetOwnershipIS - Get row and column ownership of a matrix's values as index sets.
7010: Not Collective
7012: Input Parameter:
7013: . A - matrix
7015: Output Parameters:
+ rows - rows in which this process owns elements, use `NULL` to not obtain this value
7017: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
7019: Level: intermediate
7021: Note:
7022: You should call `ISDestroy()` on the returned `IS`
7024: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
7025: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
7026: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
7027: details on matrix layouts.
7029: .seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
7030: @*/
7031: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
7032: {
7033: PetscErrorCode (*f)(Mat, IS *, IS *);
7035: PetscFunctionBegin;
7038: MatCheckPreallocated(A, 1);
7039: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
7040: if (f) {
7041: PetscCall((*f)(A, rows, cols));
7042: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
7043: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
7044: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
7045: }
7046: PetscFunctionReturn(PETSC_SUCCESS);
7047: }
7049: /*@
7050: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`
7051: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
7052: to complete the factorization.
7054: Collective
7056: Input Parameters:
7057: + fact - the factorized matrix obtained with `MatGetFactor()`
7058: . mat - the matrix
7059: . row - row permutation
7060: . col - column permutation
7061: - info - structure containing
7062: .vb
7063: levels - number of levels of fill.
7064: expected fill - as ratio of original fill.
7065: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
7066: missing diagonal entries)
7067: .ve
7069: Level: developer
7071: Notes:
7072: See [Matrix Factorization](sec_matfactor) for additional information.
7074: Most users should employ the `KSP` interface for linear solvers
7075: instead of working directly with matrix algebra routines such as this.
7076: See, e.g., `KSPCreate()`.
7078: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
7080: Developer Note:
7081: The Fortran interface is not autogenerated as the
7082: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7084: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
7085: `MatGetOrdering()`, `MatFactorInfo`
7086: @*/
PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscAssertPointer(info, 5);
  PetscAssertPointer(fact, 1);
  /* Validate user-supplied factorization options and matrix state before dispatching;
     the order of these checks determines which error is reported first */
  PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
  PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 2);

  /* Skip event logging when the symbolic phase is trivial for this factor type */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
  PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7108: /*@
7109: MatICCFactorSymbolic - Performs symbolic incomplete
7110: Cholesky factorization for a symmetric matrix. Use
7111: `MatCholeskyFactorNumeric()` to complete the factorization.
7113: Collective
7115: Input Parameters:
7116: + fact - the factorized matrix obtained with `MatGetFactor()`
7117: . mat - the matrix to be factored
7118: . perm - row and column permutation
7119: - info - structure containing
7120: .vb
7121: levels - number of levels of fill.
7122: expected fill - as ratio of original fill.
7123: .ve
7125: Level: developer
7127: Notes:
7128: Most users should employ the `KSP` interface for linear solvers
7129: instead of working directly with matrix algebra routines such as this.
7130: See, e.g., `KSPCreate()`.
7132: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7134: Developer Note:
7135: The Fortran interface is not autogenerated as the
7136: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7138: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7139: @*/
PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscAssertPointer(info, 4);
  PetscAssertPointer(fact, 1);
  /* Validate matrix state and user-supplied options before dispatching;
     the order of these checks determines which error is reported first */
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
  PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  MatCheckPreallocated(mat, 2);

  /* Skip event logging when the symbolic phase is trivial for this factor type */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
  PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7160: /*@C
7161: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7162: points to an array of valid matrices, they may be reused to store the new
7163: submatrices.
7165: Collective
7167: Input Parameters:
7168: + mat - the matrix
7169: . n - the number of submatrixes to be extracted (on this processor, may be zero)
7170: . irow - index set of rows to extract
7171: . icol - index set of columns to extract
7172: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7174: Output Parameter:
7175: . submat - the array of submatrices
7177: Level: advanced
7179: Notes:
7180: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7181: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7182: to extract a parallel submatrix.
7184: Some matrix types place restrictions on the row and column
7185: indices, such as that they be sorted or that they be equal to each other.
7187: The index sets may not have duplicate entries.
7189: When extracting submatrices from a parallel matrix, each processor can
7190: form a different submatrix by setting the rows and columns of its
7191: individual index sets according to the local submatrix desired.
7193: When finished using the submatrices, the user should destroy
7194: them with `MatDestroySubMatrices()`.
7196: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7197: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7199: This routine creates the matrices in submat; you should NOT create them before
7200: calling it. It also allocates the array of matrix pointers submat.
7202: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7203: request one row/column in a block, they must request all rows/columns that are in
7204: that block. For example, if the block size is 2 you cannot request just row 0 and
7205: column 0.
7207: Fortran Note:
7208: One must pass in as `submat` a `Mat` array of size at least `n`+1.
7210: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7211: @*/
7212: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7213: {
7214: PetscInt i;
7215: PetscBool eq;
7217: PetscFunctionBegin;
7220: if (n) {
7221: PetscAssertPointer(irow, 3);
7223: PetscAssertPointer(icol, 4);
7225: }
7226: PetscAssertPointer(submat, 6);
7227: if (n && scall == MAT_REUSE_MATRIX) {
7228: PetscAssertPointer(*submat, 6);
7230: }
7231: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7232: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7233: MatCheckPreallocated(mat, 1);
7234: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7235: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7236: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7237: for (i = 0; i < n; i++) {
7238: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7239: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7240: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7241: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7242: if (mat->boundtocpu && mat->bindingpropagates) {
7243: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7244: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7245: }
7246: #endif
7247: }
7248: PetscFunctionReturn(PETSC_SUCCESS);
7249: }
7251: /*@C
7252: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of mat (by pairs of `IS` that may live on subcomms).
7254: Collective
7256: Input Parameters:
7257: + mat - the matrix
7258: . n - the number of submatrixes to be extracted
7259: . irow - index set of rows to extract
7260: . icol - index set of columns to extract
7261: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7263: Output Parameter:
7264: . submat - the array of submatrices
7266: Level: advanced
7268: Note:
7269: This is used by `PCGASM`
7271: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7272: @*/
7273: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7274: {
7275: PetscInt i;
7276: PetscBool eq;
7278: PetscFunctionBegin;
7281: if (n) {
7282: PetscAssertPointer(irow, 3);
7284: PetscAssertPointer(icol, 4);
7286: }
7287: PetscAssertPointer(submat, 6);
7288: if (n && scall == MAT_REUSE_MATRIX) {
7289: PetscAssertPointer(*submat, 6);
7291: }
7292: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7293: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7294: MatCheckPreallocated(mat, 1);
7296: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7297: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7298: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7299: for (i = 0; i < n; i++) {
7300: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7301: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7302: }
7303: PetscFunctionReturn(PETSC_SUCCESS);
7304: }
7306: /*@C
7307: MatDestroyMatrices - Destroys an array of matrices.
7309: Collective
7311: Input Parameters:
7312: + n - the number of local matrices
7313: - mat - the matrices (this is a pointer to the array of matrices)
7315: Level: advanced
7317: Notes:
7318: Frees not only the matrices, but also the array that contains the matrices
7320: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7322: Fortran Note:
7323: Does not free the `mat` array.
7325: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7326: @*/
7327: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7328: {
7329: PetscInt i;
7331: PetscFunctionBegin;
7332: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7333: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7334: PetscAssertPointer(mat, 2);
7336: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7338: /* memory is allocated even if n = 0 */
7339: PetscCall(PetscFree(*mat));
7340: PetscFunctionReturn(PETSC_SUCCESS);
7341: }
7343: /*@C
7344: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7346: Collective
7348: Input Parameters:
7349: + n - the number of local matrices
7350: - mat - the matrices (this is a pointer to the array of matrices, just to match the calling
7351: sequence of `MatCreateSubMatrices()`)
7353: Level: advanced
7355: Note:
7356: Frees not only the matrices, but also the array that contains the matrices
7358: Fortran Note:
7359: Does not free the `mat` array.
7361: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7362: @*/
7363: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7364: {
7365: Mat mat0;
7367: PetscFunctionBegin;
7368: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7369: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7370: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7371: PetscAssertPointer(mat, 2);
7373: mat0 = (*mat)[0];
7374: if (mat0 && mat0->ops->destroysubmatrices) {
7375: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7376: } else {
7377: PetscCall(MatDestroyMatrices(n, mat));
7378: }
7379: PetscFunctionReturn(PETSC_SUCCESS);
7380: }
7382: /*@
7383: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7385: Collective
7387: Input Parameter:
7388: . mat - the matrix
7390: Output Parameter:
7391: . matstruct - the sequential matrix with the nonzero structure of `mat`
7393: Level: developer
7395: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7396: @*/
PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
{
  PetscFunctionBegin;
  PetscAssertPointer(matstruct, 2);
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* Dispatch to the type-specific implementation; errors if the type has none */
  PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
  PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7413: /*@C
7414: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7416: Collective
7418: Input Parameter:
7419: . mat - the matrix
7421: Level: advanced
7423: Note:
7424: This is not needed, one can just call `MatDestroy()`
7426: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7427: @*/
PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
{
  PetscFunctionBegin;
  PetscAssertPointer(mat, 1);
  /* Kept for API symmetry with MatGetSeqNonzeroStructure(); simply destroys the matrix */
  PetscCall(MatDestroy(mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7436: /*@
7437: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7438: replaces the index sets by larger ones that represent submatrices with
7439: additional overlap.
7441: Collective
7443: Input Parameters:
7444: + mat - the matrix
7445: . n - the number of index sets
7446: . is - the array of index sets (these index sets will changed during the call)
7447: - ov - the additional overlap requested
7449: Options Database Key:
7450: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7452: Level: developer
7454: Note:
7455: The computed overlap preserves the matrix block sizes when the blocks are square.
7456: That is: if a matrix nonzero for a given block would increase the overlap all columns associated with
7457: that block are included in the overlap regardless of whether each specific column would increase the overlap.
7459: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7460: @*/
7461: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7462: {
7463: PetscInt i, bs, cbs;
7465: PetscFunctionBegin;
7469: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7470: if (n) {
7471: PetscAssertPointer(is, 3);
7473: }
7474: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7475: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7476: MatCheckPreallocated(mat, 1);
7478: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7479: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7480: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7481: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7482: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7483: if (bs == cbs) {
7484: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7485: }
7486: PetscFunctionReturn(PETSC_SUCCESS);
7487: }
7489: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7491: /*@
7492: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7493: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7494: additional overlap.
7496: Collective
7498: Input Parameters:
7499: + mat - the matrix
7500: . n - the number of index sets
7501: . is - the array of index sets (these index sets will changed during the call)
7502: - ov - the additional overlap requested
Options Database Key:
7505: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7507: Level: developer
7509: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7510: @*/
7511: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7512: {
7513: PetscInt i;
7515: PetscFunctionBegin;
7518: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7519: if (n) {
7520: PetscAssertPointer(is, 3);
7522: }
7523: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7524: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7525: MatCheckPreallocated(mat, 1);
7526: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7527: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7528: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7529: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7530: PetscFunctionReturn(PETSC_SUCCESS);
7531: }
7533: /*@
7534: MatGetBlockSize - Returns the matrix block size.
7536: Not Collective
7538: Input Parameter:
7539: . mat - the matrix
7541: Output Parameter:
7542: . bs - block size
7544: Level: intermediate
7546: Notes:
The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7549: If the block size has not been set yet this routine returns 1.
7551: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7552: @*/
PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
{
  PetscFunctionBegin;
  PetscAssertPointer(bs, 2);
  /* Return the absolute value of the row-layout block size */
  *bs = PetscAbs(mat->rmap->bs);
  PetscFunctionReturn(PETSC_SUCCESS);
}
7562: /*@
7563: MatGetBlockSizes - Returns the matrix block row and column sizes.
7565: Not Collective
7567: Input Parameter:
7568: . mat - the matrix
7570: Output Parameters:
7571: + rbs - row block size
7572: - cbs - column block size
7574: Level: intermediate
7576: Notes:
The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7578: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7580: If a block size has not been set yet this routine returns 1.
7582: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7583: @*/
PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
{
  PetscFunctionBegin;
  /* Either output pointer may be NULL when the caller does not need that value */
  if (rbs) PetscAssertPointer(rbs, 2);
  if (cbs) PetscAssertPointer(cbs, 3);
  if (rbs) *rbs = PetscAbs(mat->rmap->bs); /* absolute value of the row-layout block size */
  if (cbs) *cbs = PetscAbs(mat->cmap->bs); /* absolute value of the column-layout block size */
  PetscFunctionReturn(PETSC_SUCCESS);
}
7595: /*@
7596: MatSetBlockSize - Sets the matrix block size.
7598: Logically Collective
7600: Input Parameters:
7601: + mat - the matrix
7602: - bs - block size
7604: Level: intermediate
7606: Notes:
The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7608: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7610: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7611: is compatible with the matrix local sizes.
7613: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7614: @*/
PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
{
  PetscFunctionBegin;
  /* Square block storage: use the same block size for rows and columns */
  PetscCall(MatSetBlockSizes(mat, bs, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Cached result of MatComputeVariableBlockEnvelope(), attached to the matrix
   via a PetscContainer under the key "EnvelopeData" */
typedef struct {
  PetscInt n;                    /* number of diagonal blocks owned by this process */
  IS *is;                        /* index sets selecting each local block of the original matrix */
  Mat *mat;                      /* NOTE(review): not referenced in the visible code; presumably per-block submatrices — confirm */
  PetscObjectState nonzerostate; /* nonzero state of the matrix when the envelope was computed */
  Mat C;                         /* preallocated matrix that will hold the inverted block diagonal */
} EnvelopeData;
7632: static PetscErrorCode EnvelopeDataDestroy(void *ptr)
7633: {
7634: EnvelopeData *edata = (EnvelopeData *)ptr;
7636: PetscFunctionBegin;
7637: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7638: PetscCall(PetscFree(edata->is));
7639: PetscCall(PetscFree(edata));
7640: PetscFunctionReturn(PETSC_SUCCESS);
7641: }
7643: /*@
7644: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal this computes and stores
7645: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7647: Collective
7649: Input Parameter:
7650: . mat - the matrix
7652: Level: intermediate
7654: Notes:
7655: There can be zeros within the blocks
7657: The blocks can overlap between processes, including laying on more than two processes
7659: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7660: @*/
PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
{
  PetscInt           n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
  PetscInt          *diag, *odiag, sc;
  VecScatter         scatter;
  PetscScalar       *seqv;
  const PetscScalar *parv;
  const PetscInt    *ia, *ja;
  PetscBool          set, flag, done;
  Mat                AA = mat, A;
  MPI_Comm           comm;
  PetscMPIInt        rank, size, tag;
  MPI_Status         status;
  PetscContainer     container;
  EnvelopeData      *edata;
  Vec                seq, par;
  IS                 isglobal;

  PetscFunctionBegin;
  /* Block detection below scans the right-most nonzero of each row, so it needs a
     symmetric nonzero pattern; symmetrize with A + A^T when symmetry is not known */
  PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
  if (!set || !flag) {
    /* TODO: only needs nonzero structure of transpose */
    PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
    PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
  }
  PetscCall(MatAIJGetLocalMat(AA, &A));
  PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
  PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");

  PetscCall(MatGetLocalSize(mat, &n, NULL));
  PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  PetscCall(PetscMalloc2(n, &sizes, n, &starts));

  /* Pipelined scan over ranks: receive the running envelope (furthest-right nonzero
     column seen so far) and the start of the current block from the previous rank */
  if (rank > 0) {
    PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
    PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
  }
  PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
  for (i = 0; i < n; i++) {
    /* ja[ia[i + 1] - 1] is the last (largest) column index of local row i */
    env = PetscMax(env, ja[ia[i + 1] - 1]);
    II  = rstart + i;
    /* A block closes when the envelope does not extend past the current global row:
       rows tbs..II form one variable-size diagonal block */
    if (env == II) {
      starts[lblocks]  = tbs;
      sizes[lblocks++] = 1 + II - tbs;
      tbs              = 1 + II;
    }
  }
  /* Forward the (possibly still open) envelope and block start to the next rank */
  if (rank < size - 1) {
    PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
    PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
  }

  PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
  if (!set || !flag) PetscCall(MatDestroy(&AA));
  PetscCall(MatDestroy(&A));

  PetscCall(PetscNew(&edata));
  PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
  edata->n = lblocks;
  /* create IS needed for extracting blocks from the original matrix */
  PetscCall(PetscMalloc1(lblocks, &edata->is));
  for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));

  /* Create the resulting inverse matrix structure with preallocation information */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
  PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
  PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
  PetscCall(MatSetType(edata->C, MATAIJ));

  /* Communicate the start and end of each row, from each block to the correct rank */
  /* TODO: Use PetscSF instead of VecScatter */
  for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
  /* Pack two entries per local row: [block start, block end) encoded as scalars */
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
  PetscCall(VecGetArrayWrite(seq, &seqv));
  for (PetscInt i = 0; i < lblocks; i++) {
    for (PetscInt j = 0; j < sizes[i]; j++) {
      seqv[cnt]     = starts[i];
      seqv[cnt + 1] = starts[i] + sizes[i];
      cnt += 2;
    }
  }
  PetscCall(VecRestoreArrayWrite(seq, &seqv));
  /* Exclusive prefix sum gives this rank's starting offset in the parallel vector */
  PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
  sc -= cnt;
  PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
  PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
  PetscCall(ISDestroy(&isglobal));
  PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&scatter));
  PetscCall(VecDestroy(&seq));
  /* Convert each row's [start, end) block interval into diagonal/off-diagonal nonzero
     counts relative to this rank's column ownership range [cstart, cend) */
  PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
  PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
  PetscCall(VecGetArrayRead(par, &parv));
  cnt = 0;
  PetscCall(MatGetSize(mat, NULL, &n));
  for (PetscInt i = 0; i < mat->rmap->n; i++) {
    PetscInt start, end, d = 0, od = 0;

    start = (PetscInt)PetscRealPart(parv[cnt]);
    end   = (PetscInt)PetscRealPart(parv[cnt + 1]);
    cnt += 2;

    /* Add contributions for columns in [start, n), then subtract those in [end, n),
       leaving the counts for columns in [start, end) split into diag/off-diag parts */
    if (start < cstart) {
      od += cstart - start + n - cend;
      d += cend - cstart;
    } else if (start < cend) {
      od += n - cend;
      d += cend - start;
    } else od += n - start;
    if (end <= cstart) {
      od -= cstart - end + n - cend;
      d -= cend - cstart;
    } else if (end < cend) {
      od -= n - cend;
      d -= cend - end;
    } else od -= n - end;

    odiag[i] = od;
    diag[i]  = d;
  }
  PetscCall(VecRestoreArrayRead(par, &parv));
  PetscCall(VecDestroy(&par));
  PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
  PetscCall(PetscFree2(diag, odiag));
  PetscCall(PetscFree2(sizes, starts));

  /* Attach the envelope data to the matrix; the container owns edata and frees it
     via EnvelopeDataDestroy when the matrix is destroyed or the data is replaced */
  PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
  PetscCall(PetscContainerSetPointer(container, edata));
  PetscCall(PetscContainerSetUserDestroy(container, (PetscErrorCode(*)(void *))EnvelopeDataDestroy));
  PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
  PetscCall(PetscObjectDereference((PetscObject)container));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7802: /*@
7803: MatInvertVariableBlockEnvelope - set matrix C to be the inverted block diagonal of matrix A
7805: Collective
7807: Input Parameters:
7808: + A - the matrix
7809: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7811: Output Parameter:
7812: . C - matrix with inverted block diagonal of `A`
7814: Level: advanced
7816: Note:
7817: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7819: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7820: @*/
7821: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7822: {
7823: PetscContainer container;
7824: EnvelopeData *edata;
7825: PetscObjectState nonzerostate;
7827: PetscFunctionBegin;
7828: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7829: if (!container) {
7830: PetscCall(MatComputeVariableBlockEnvelope(A));
7831: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7832: }
7833: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7834: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7835: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7836: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7838: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7839: *C = edata->C;
7841: for (PetscInt i = 0; i < edata->n; i++) {
7842: Mat D;
7843: PetscScalar *dvalues;
7845: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7846: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7847: PetscCall(MatSeqDenseInvert(D));
7848: PetscCall(MatDenseGetArray(D, &dvalues));
7849: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7850: PetscCall(MatDestroy(&D));
7851: }
7852: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7853: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7854: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7855: PetscFunctionReturn(PETSC_SUCCESS);
7856: }
7858: /*@
7859: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7861: Not Collective
7863: Input Parameters:
7864: + mat - the matrix
7865: . nblocks - the number of blocks on this process, each block can only exist on a single process
7866: - bsizes - the block sizes
7868: Level: intermediate
7870: Notes:
7871: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7873: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
7875: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7876: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7877: @*/
7878: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
7879: {
7880: PetscInt ncnt = 0, nlocal;
7882: PetscFunctionBegin;
7884: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7885: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
7886: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
7887: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7888: PetscCall(PetscFree(mat->bsizes));
7889: mat->nblocks = nblocks;
7890: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7891: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7892: PetscFunctionReturn(PETSC_SUCCESS);
7893: }
7895: /*@C
7896: MatGetVariableBlockSizes - Gets the diagonal blocks of the matrix that need not be of the same size
7898: Not Collective; No Fortran Support
7900: Input Parameter:
7901: . mat - the matrix
7903: Output Parameters:
7904: + nblocks - the number of blocks on this process
7905: - bsizes - the block sizes
7907: Level: intermediate
7909: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7910: @*/
7911: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
7912: {
7913: PetscFunctionBegin;
7915: if (nblocks) *nblocks = mat->nblocks;
7916: if (bsizes) *bsizes = mat->bsizes;
7917: PetscFunctionReturn(PETSC_SUCCESS);
7918: }
7920: /*@
7921: MatSetBlockSizes - Sets the matrix block row and column sizes.
7923: Logically Collective
7925: Input Parameters:
7926: + mat - the matrix
7927: . rbs - row block size
7928: - cbs - column block size
7930: Level: intermediate
7932: Notes:
7933: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7934: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7935: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7937: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
7938: are compatible with the matrix local sizes.
7940: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
7942: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7943: @*/
7944: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
7945: {
7946: PetscFunctionBegin;
7950: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
7951: if (mat->rmap->refcnt) {
7952: ISLocalToGlobalMapping l2g = NULL;
7953: PetscLayout nmap = NULL;
7955: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
7956: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
7957: PetscCall(PetscLayoutDestroy(&mat->rmap));
7958: mat->rmap = nmap;
7959: mat->rmap->mapping = l2g;
7960: }
7961: if (mat->cmap->refcnt) {
7962: ISLocalToGlobalMapping l2g = NULL;
7963: PetscLayout nmap = NULL;
7965: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
7966: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
7967: PetscCall(PetscLayoutDestroy(&mat->cmap));
7968: mat->cmap = nmap;
7969: mat->cmap->mapping = l2g;
7970: }
7971: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
7972: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
7973: PetscFunctionReturn(PETSC_SUCCESS);
7974: }
7976: /*@
7977: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7979: Logically Collective
7981: Input Parameters:
7982: + mat - the matrix
7983: . fromRow - matrix from which to copy row block size
7984: - fromCol - matrix from which to copy column block size (can be same as fromRow)
7986: Level: developer
7988: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
7989: @*/
7990: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
7991: {
7992: PetscFunctionBegin;
7996: if (fromRow->rmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
7997: if (fromCol->cmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
7998: PetscFunctionReturn(PETSC_SUCCESS);
7999: }
8001: /*@
8002: MatResidual - Default routine to calculate the residual r = b - Ax
8004: Collective
8006: Input Parameters:
8007: + mat - the matrix
8008: . b - the right-hand-side
8009: - x - the approximate solution
8011: Output Parameter:
8012: . r - location to store the residual
8014: Level: developer
8016: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8017: @*/
8018: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8019: {
8020: PetscFunctionBegin;
8026: MatCheckPreallocated(mat, 1);
8027: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8028: if (!mat->ops->residual) {
8029: PetscCall(MatMult(mat, x, r));
8030: PetscCall(VecAYPX(r, -1.0, b));
8031: } else {
8032: PetscUseTypeMethod(mat, residual, b, x, r);
8033: }
8034: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8035: PetscFunctionReturn(PETSC_SUCCESS);
8036: }
8038: /*MC
8039: MatGetRowIJF90 - Obtains the compressed row storage i and j indices for the local rows of a sparse matrix
8041: Synopsis:
8042: MatGetRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
8044: Not Collective
8046: Input Parameters:
8047: + A - the matrix
8048: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8049: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8050: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8051: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8052: always used.
8054: Output Parameters:
8055: + n - number of local rows in the (possibly compressed) matrix
8056: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
8057: . ja - the column indices
8058: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8059: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8061: Level: developer
8063: Note:
8064: Use `MatRestoreRowIJF90()` when you no longer need access to the data
8066: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatRestoreRowIJF90()`
8067: M*/
8069: /*MC
8070: MatRestoreRowIJF90 - restores the compressed row storage i and j indices for the local rows of a sparse matrix obtained with `MatGetRowIJF90()`
8072: Synopsis:
8073: MatRestoreRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
8075: Not Collective
8077: Input Parameters:
8078: + A - the matrix
8079: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8080: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8081: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8082: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8083: always used.
8084: . n - number of local rows in the (possibly compressed) matrix
8085: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
8086: . ja - the column indices
8087: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8088: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8090: Level: developer
8092: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatGetRowIJF90()`
8093: M*/
8095: /*@C
8096: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8098: Collective
8100: Input Parameters:
8101: + mat - the matrix
8102: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8103: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8104: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8105: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8106: always used.
8108: Output Parameters:
8109: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8110: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8111: . ja - the column indices, use `NULL` if not needed
8112: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8113: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8115: Level: developer
8117: Notes:
8118: You CANNOT change any of the ia[] or ja[] values.
8120: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
8122: Fortran Notes:
8123: Use
8124: .vb
8125: PetscInt, pointer :: ia(:),ja(:)
8126: call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8127: ! Access the ith and jth entries via ia(i) and ja(j)
8128: .ve
8130: `MatGetRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatGetRowIJF90()`
8132: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetRowIJF90()`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8133: @*/
8134: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8135: {
8136: PetscFunctionBegin;
  /* All output arguments are optional; validate only those that were supplied */
8139: if (n) PetscAssertPointer(n, 5);
8140: if (ia) PetscAssertPointer(ia, 6);
8141: if (ja) PetscAssertPointer(ja, 7);
8142: if (done) PetscAssertPointer(done, 8);
8143: MatCheckPreallocated(mat, 1);
  /* Types without a getrowij implementation report failure through done.
     NOTE(review): when done is NULL and the op is missing, control falls into the else
     branch and PetscUseTypeMethod raises an error instead -- confirm this is intended. */
8144: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8145: else {
8146: if (done) *done = PETSC_TRUE;
8147: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8148: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8149: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8150: }
8151: PetscFunctionReturn(PETSC_SUCCESS);
8152: }
8154: /*@C
8155: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8157: Collective
8159: Input Parameters:
8160: + mat - the matrix
8161: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8162: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8163: symmetrized
8164: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8165: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8166: always used.
8167: . n - number of columns in the (possibly compressed) matrix
8168: . ia - the column pointers; that is ia[0] = 0, ia[col] = i[col-1] + number of elements in that col of the matrix
8169: - ja - the row indices
8171: Output Parameter:
8172: . done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8174: Level: developer
8176: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8177: @*/
8178: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8179: {
8180: PetscFunctionBegin;
8183: PetscAssertPointer(n, 5);
8184: if (ia) PetscAssertPointer(ia, 6);
8185: if (ja) PetscAssertPointer(ja, 7);
8186: PetscAssertPointer(done, 8);
8187: MatCheckPreallocated(mat, 1);
8188: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8189: else {
8190: *done = PETSC_TRUE;
8191: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8192: }
8193: PetscFunctionReturn(PETSC_SUCCESS);
8194: }
8196: /*@C
8197: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8199: Collective
8201: Input Parameters:
8202: + mat - the matrix
8203: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8204: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8205: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8206: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8207: always used.
8208: . n - size of (possibly compressed) matrix
8209: . ia - the row pointers
8210: - ja - the column indices
8212: Output Parameter:
8213: . done - `PETSC_TRUE` or `PETSC_FALSE` indicated that the values have been returned
8215: Level: developer
8217: Note:
8218: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8219: use of the array after it has been restored. If you pass `NULL`, it will
8220: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8222: Fortran Note:
8223: `MatRestoreRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatRestoreRowIJF90()`
8225: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreRowIJF90()`, `MatRestoreColumnIJ()`
8226: @*/
8227: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8228: {
8229: PetscFunctionBegin;
  /* All output arguments are optional; validate only those that were supplied */
8232: if (ia) PetscAssertPointer(ia, 6);
8233: if (ja) PetscAssertPointer(ja, 7);
8234: if (done) PetscAssertPointer(done, 8);
8235: MatCheckPreallocated(mat, 1);
  /* NOTE(review): mirrors MatGetRowIJ -- when done is NULL and restorerowij is missing,
     PetscUseTypeMethod errors instead of silently reporting failure; confirm intended. */
8237: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8238: else {
8239: if (done) *done = PETSC_TRUE;
8240: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
  /* Zero the outputs so stale ia/ja pointers cannot be used after restore */
8241: if (n) *n = 0;
8242: if (ia) *ia = NULL;
8243: if (ja) *ja = NULL;
8244: }
8245: PetscFunctionReturn(PETSC_SUCCESS);
8246: }
8248: /*@C
8249: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8251: Collective
8253: Input Parameters:
8254: + mat - the matrix
8255: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8256: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8257: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8258: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8259: always used.
8261: Output Parameters:
8262: + n - size of (possibly compressed) matrix
8263: . ia - the column pointers
8264: . ja - the row indices
8265: - done - `PETSC_TRUE` or `PETSC_FALSE` indicated that the values have been returned
8267: Level: developer
8269: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8270: @*/
8271: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8272: {
8273: PetscFunctionBegin;
8276: if (ia) PetscAssertPointer(ia, 6);
8277: if (ja) PetscAssertPointer(ja, 7);
8278: PetscAssertPointer(done, 8);
8279: MatCheckPreallocated(mat, 1);
8281: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8282: else {
8283: *done = PETSC_TRUE;
8284: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8285: if (n) *n = 0;
8286: if (ia) *ia = NULL;
8287: if (ja) *ja = NULL;
8288: }
8289: PetscFunctionReturn(PETSC_SUCCESS);
8290: }
8292: /*@
8293: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8294: `MatGetColumnIJ()`.
8296: Collective
8298: Input Parameters:
8299: + mat - the matrix
8300: . ncolors - maximum color value
8301: . n - number of entries in colorarray
8302: - colorarray - array indicating color for each column
8304: Output Parameter:
8305: . iscoloring - coloring generated using colorarray information
8307: Level: developer
8309: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8310: @*/
8311: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8312: {
8313: PetscFunctionBegin;
8316: PetscAssertPointer(colorarray, 4);
8317: PetscAssertPointer(iscoloring, 5);
8318: MatCheckPreallocated(mat, 1);
8320: if (!mat->ops->coloringpatch) {
8321: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8322: } else {
8323: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8324: }
8325: PetscFunctionReturn(PETSC_SUCCESS);
8326: }
8328: /*@
8329: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8331: Logically Collective
8333: Input Parameter:
8334: . mat - the factored matrix to be reset
8336: Level: developer
8338: Notes:
8339: This routine should be used only with factored matrices formed by in-place
8340: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8341: format). This option can save memory, for example, when solving nonlinear
8342: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8343: ILU(0) preconditioner.
8345: One can specify in-place ILU(0) factorization by calling
8346: .vb
8347: PCSetType(pc,PCILU);
8348: PCFactorSetUseInPlace(pc);
8349: .ve
8350: or by using the options -pc_type ilu -pc_factor_in_place
8352: In-place factorization ILU(0) can also be used as a local
8353: solver for the blocks within the block Jacobi or additive Schwarz
8354: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8355: for details on setting local solver options.
8357: Most users should employ the `KSP` interface for linear solvers
8358: instead of working directly with matrix algebra routines such as this.
8359: See, e.g., `KSPCreate()`.
8361: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8362: @*/
8363: PetscErrorCode MatSetUnfactored(Mat mat)
8364: {
8365: PetscFunctionBegin;
8368: MatCheckPreallocated(mat, 1);
8369: mat->factortype = MAT_FACTOR_NONE;
8370: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8371: PetscUseTypeMethod(mat, setunfactored);
8372: PetscFunctionReturn(PETSC_SUCCESS);
8373: }
8375: /*MC
8376: MatDenseGetArrayF90 - Accesses a matrix array from Fortran
8378: Synopsis:
8379: MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8381: Not Collective
8383: Input Parameter:
8384: . x - matrix
8386: Output Parameters:
8387: + xx_v - the Fortran pointer to the array
8388: - ierr - error code
8390: Example of Usage:
8391: .vb
8392: PetscScalar, pointer xx_v(:,:)
8393: ....
8394: call MatDenseGetArrayF90(x,xx_v,ierr)
8395: a = xx_v(3)
8396: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8397: .ve
8399: Level: advanced
8401: .seealso: [](ch_matrices), `Mat`, `MatDenseRestoreArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJGetArrayF90()`
8402: M*/
8404: /*MC
8405: MatDenseRestoreArrayF90 - Restores a matrix array that has been
8406: accessed with `MatDenseGetArrayF90()`.
8408: Synopsis:
8409: MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8411: Not Collective
8413: Input Parameters:
8414: + x - matrix
8415: - xx_v - the Fortran90 pointer to the array
8417: Output Parameter:
8418: . ierr - error code
8420: Example of Usage:
8421: .vb
8422: PetscScalar, pointer xx_v(:,:)
8423: ....
8424: call MatDenseGetArrayF90(x,xx_v,ierr)
8425: a = xx_v(3)
8426: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8427: .ve
8429: Level: advanced
8431: .seealso: [](ch_matrices), `Mat`, `MatDenseGetArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJRestoreArrayF90()`
8432: M*/
8434: /*MC
8435: MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran.
8437: Synopsis:
8438: MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8440: Not Collective
8442: Input Parameter:
8443: . x - matrix
8445: Output Parameters:
8446: + xx_v - the Fortran pointer to the array
8447: - ierr - error code
8449: Example of Usage:
8450: .vb
8451: PetscScalar, pointer xx_v(:)
8452: ....
8453: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8454: a = xx_v(3)
8455: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8456: .ve
8458: Level: advanced
8460: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseGetArrayF90()`
8461: M*/
8463: /*MC
8464: MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
8465: accessed with `MatSeqAIJGetArrayF90()`.
8467: Synopsis:
8468: MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8470: Not Collective
8472: Input Parameters:
8473: + x - matrix
8474: - xx_v - the Fortran90 pointer to the array
8476: Output Parameter:
8477: . ierr - error code
8479: Example of Usage:
8480: .vb
8481: PetscScalar, pointer xx_v(:)
8482: ....
8483: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8484: a = xx_v(3)
8485: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8486: .ve
8488: Level: advanced
8490: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseRestoreArrayF90()`
8491: M*/
8493: /*@
8494: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8495: as the original matrix.
8497: Collective
8499: Input Parameters:
8500: + mat - the original matrix
8501: . isrow - parallel `IS` containing the rows this processor should obtain
8502: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in IT's "diagonal part" in the new matrix.
8503: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8505: Output Parameter:
8506: . newmat - the new submatrix, of the same type as the original matrix
8508: Level: advanced
8510: Notes:
8511: The submatrix will be able to be multiplied with vectors using the same layout as `iscol`.
8513: Some matrix types place restrictions on the row and column indices, such
8514: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8515: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8517: The index sets may not have duplicate entries.
8519: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`,
8520: the `MatCreateSubMatrix()` routine will create the newmat for you. Any additional calls
8521: to this routine with a mat of the same nonzero structure and with a call of `MAT_REUSE_MATRIX`
8522: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8523: you are finished using it.
8525: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8526: the input matrix.
8528: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8530: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8531: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8533: Example usage:
8534: Consider the following 8x8 matrix with 34 non-zero values, that is
8535: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8536: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8537: as follows
8538: .vb
8539: 1 2 0 | 0 3 0 | 0 4
8540: Proc0 0 5 6 | 7 0 0 | 8 0
8541: 9 0 10 | 11 0 0 | 12 0
8542: -------------------------------------
8543: 13 0 14 | 15 16 17 | 0 0
8544: Proc1 0 18 0 | 19 20 21 | 0 0
8545: 0 0 0 | 22 23 0 | 24 0
8546: -------------------------------------
8547: Proc2 25 26 27 | 0 0 28 | 29 0
8548: 30 0 0 | 31 32 33 | 0 34
8549: .ve
8551: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8553: .vb
8554: 2 0 | 0 3 0 | 0
8555: Proc0 5 6 | 7 0 0 | 8
8556: -------------------------------
8557: Proc1 18 0 | 19 20 21 | 0
8558: -------------------------------
8559: Proc2 26 27 | 0 0 28 | 29
8560: 0 0 | 31 32 33 | 0
8561: .ve
8563: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8564: @*/
8565: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8566: {
8567: PetscMPIInt size;
8568: Mat *local;
8569: IS iscoltmp;
8570: PetscBool flg;
8572: PetscFunctionBegin;
8576: PetscAssertPointer(newmat, 5);
8579: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8580: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8582: MatCheckPreallocated(mat, 1);
8583: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  /* Fast path: when every process asks for exactly its own contiguous row range (and iscol
     is NULL or identical to isrow), the "submatrix" is the whole matrix */
8585: if (!iscol || isrow == iscol) {
8586: PetscBool stride;
8587: PetscMPIInt grabentirematrix = 0, grab;
8588: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8589: if (stride) {
8590: PetscInt first, step, n, rstart, rend;
8591: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8592: if (step == 1) {
8593: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8594: if (rstart == first) {
8595: PetscCall(ISGetLocalSize(isrow, &n));
8596: if (n == rend - rstart) grabentirematrix = 1;
8597: }
8598: }
8599: }
  /* All ranks must agree before taking the fast path (MPI_MIN over the per-rank flag) */
8600: PetscCall(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8601: if (grab) {
8602: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8603: if (cll == MAT_INITIAL_MATRIX) {
8604: *newmat = mat;
8605: PetscCall(PetscObjectReference((PetscObject)mat));
8606: }
8607: PetscFunctionReturn(PETSC_SUCCESS);
8608: }
8609: }
  /* A NULL iscol means "all columns": build a temporary stride IS over this rank's columns */
8611: if (!iscol) {
8612: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8613: } else {
8614: iscoltmp = iscol;
8615: }
8617: /* if original matrix is on just one processor then use submatrix generated */
8618: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8619: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8620: goto setproperties;
8621: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8622: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8623: *newmat = *local;
8624: PetscCall(PetscFree(local));
8625: goto setproperties;
8626: } else if (!mat->ops->createsubmatrix) {
8627: /* Create a new matrix type that implements the operation using the full matrix */
8628: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8629: switch (cll) {
8630: case MAT_INITIAL_MATRIX:
8631: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8632: break;
8633: case MAT_REUSE_MATRIX:
8634: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8635: break;
8636: default:
8637: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8638: }
8639: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8640: goto setproperties;
8641: }
  /* General case: the implementation provides createsubmatrix directly */
8643: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8644: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8645: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8647: setproperties:
  /* If the row and column index sets are identical (and no symmetry flags were set yet),
     the submatrix inherits the parent's symmetry options */
8648: if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
8649: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8650: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8651: }
8652: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8653: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8654: PetscFunctionReturn(PETSC_SUCCESS);
8655: }
8657: /*@
8658: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8660: Not Collective
8662: Input Parameters:
8663: + A - the matrix we wish to propagate options from
8664: - B - the matrix we wish to propagate options to
8666: Level: beginner
8668: Note:
8669: Propagates the options associated to `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8671: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8672: @*/
8673: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8674: {
8675: PetscFunctionBegin;
8678: B->symmetry_eternal = A->symmetry_eternal;
8679: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8680: B->symmetric = A->symmetric;
8681: B->structurally_symmetric = A->structurally_symmetric;
8682: B->spd = A->spd;
8683: B->hermitian = A->hermitian;
8684: PetscFunctionReturn(PETSC_SUCCESS);
8685: }
8687: /*@
8688: MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8689: used during the assembly process to store values that belong to
8690: other processors.
8692: Not Collective
8694: Input Parameters:
8695: + mat - the matrix
8696: . size - the initial size of the stash.
8697: - bsize - the initial size of the block-stash(if used).
8699: Options Database Keys:
8700: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8701: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8703: Level: intermediate
8705: Notes:
8706: The block-stash is used for values set with `MatSetValuesBlocked()` while
8707: the stash is used for values set with `MatSetValues()`
8709: Run with the option -info and look for output of the form
8710: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8711: to determine the appropriate value, MM, to use for size and
8712: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8713: to determine the value, BMM to use for bsize
8715: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8716: @*/
PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
{
  PetscFunctionBegin;
  /* scalar stash: holds off-process values set with MatSetValues() */
  PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
  /* block stash: holds off-process values set with MatSetValuesBlocked() */
  PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
  PetscFunctionReturn(PETSC_SUCCESS);
}
8727: /*@
8728: MatInterpolateAdd - $w = y + A*x$ or $A^T*x$ depending on the shape of
8729: the matrix
8731: Neighbor-wise Collective
8733: Input Parameters:
8734: + A - the matrix
8735: . x - the vector to be multiplied by the interpolation operator
8736: - y - the vector to be added to the result
8738: Output Parameter:
8739: . w - the resulting vector
8741: Level: intermediate
8743: Notes:
8744: `w` may be the same vector as `y`.
8746: This allows one to use either the restriction or interpolation (its transpose)
8747: matrix to do the interpolation
8749: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8750: @*/
8751: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8752: {
8753: PetscInt M, N, Ny;
8755: PetscFunctionBegin;
8760: PetscCall(MatGetSize(A, &M, &N));
8761: PetscCall(VecGetSize(y, &Ny));
8762: if (M == Ny) {
8763: PetscCall(MatMultAdd(A, x, y, w));
8764: } else {
8765: PetscCall(MatMultTransposeAdd(A, x, y, w));
8766: }
8767: PetscFunctionReturn(PETSC_SUCCESS);
8768: }
8770: /*@
8771: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8772: the matrix
8774: Neighbor-wise Collective
8776: Input Parameters:
8777: + A - the matrix
8778: - x - the vector to be interpolated
8780: Output Parameter:
8781: . y - the resulting vector
8783: Level: intermediate
8785: Note:
8786: This allows one to use either the restriction or interpolation (its transpose)
8787: matrix to do the interpolation
8789: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8790: @*/
8791: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8792: {
8793: PetscInt M, N, Ny;
8795: PetscFunctionBegin;
8799: PetscCall(MatGetSize(A, &M, &N));
8800: PetscCall(VecGetSize(y, &Ny));
8801: if (M == Ny) {
8802: PetscCall(MatMult(A, x, y));
8803: } else {
8804: PetscCall(MatMultTranspose(A, x, y));
8805: }
8806: PetscFunctionReturn(PETSC_SUCCESS);
8807: }
8809: /*@
8810: MatRestrict - $y = A*x$ or $A^T*x$
8812: Neighbor-wise Collective
8814: Input Parameters:
8815: + A - the matrix
8816: - x - the vector to be restricted
8818: Output Parameter:
8819: . y - the resulting vector
8821: Level: intermediate
8823: Note:
8824: This allows one to use either the restriction or interpolation (its transpose)
8825: matrix to do the restriction
8827: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8828: @*/
8829: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8830: {
8831: PetscInt M, N, Nx;
8833: PetscFunctionBegin;
8837: PetscCall(MatGetSize(A, &M, &N));
8838: PetscCall(VecGetSize(x, &Nx));
8839: if (M == Nx) {
8840: PetscCall(MatMultTranspose(A, x, y));
8841: } else {
8842: PetscCall(MatMult(A, x, y));
8843: }
8844: PetscFunctionReturn(PETSC_SUCCESS);
8845: }
8847: /*@
8848: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8850: Neighbor-wise Collective
8852: Input Parameters:
8853: + A - the matrix
8854: . x - the input dense matrix to be multiplied
8855: - w - the input dense matrix to be added to the result
8857: Output Parameter:
8858: . y - the output dense matrix
8860: Level: intermediate
8862: Note:
8863: This allows one to use either the restriction or interpolation (its transpose)
8864: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8865: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8867: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8868: @*/
8869: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8870: {
8871: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8872: PetscBool trans = PETSC_TRUE;
8873: MatReuse reuse = MAT_INITIAL_MATRIX;
8875: PetscFunctionBegin;
8881: PetscCall(MatGetSize(A, &M, &N));
8882: PetscCall(MatGetSize(x, &Mx, &Nx));
8883: if (N == Mx) trans = PETSC_FALSE;
8884: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8885: Mo = trans ? N : M;
8886: if (*y) {
8887: PetscCall(MatGetSize(*y, &My, &Ny));
8888: if (Mo == My && Nx == Ny) {
8889: reuse = MAT_REUSE_MATRIX;
8890: } else {
8891: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8892: PetscCall(MatDestroy(y));
8893: }
8894: }
8896: if (w && *y == w) { /* this is to minimize changes in PCMG */
8897: PetscBool flg;
8899: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8900: if (w) {
8901: PetscInt My, Ny, Mw, Nw;
8903: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8904: PetscCall(MatGetSize(*y, &My, &Ny));
8905: PetscCall(MatGetSize(w, &Mw, &Nw));
8906: if (!flg || My != Mw || Ny != Nw) w = NULL;
8907: }
8908: if (!w) {
8909: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8910: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8911: PetscCall(PetscObjectDereference((PetscObject)w));
8912: } else {
8913: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8914: }
8915: }
8916: if (!trans) {
8917: PetscCall(MatMatMult(A, x, reuse, PETSC_DEFAULT, y));
8918: } else {
8919: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DEFAULT, y));
8920: }
8921: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8922: PetscFunctionReturn(PETSC_SUCCESS);
8923: }
8925: /*@
8926: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8928: Neighbor-wise Collective
8930: Input Parameters:
8931: + A - the matrix
8932: - x - the input dense matrix
8934: Output Parameter:
8935: . y - the output dense matrix
8937: Level: intermediate
8939: Note:
8940: This allows one to use either the restriction or interpolation (its transpose)
8941: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8942: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8944: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8945: @*/
PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
{
  PetscFunctionBegin;
  /* interpolation without an additive term: delegate with w = NULL */
  PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
  PetscFunctionReturn(PETSC_SUCCESS);
}
8953: /*@
8954: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8956: Neighbor-wise Collective
8958: Input Parameters:
8959: + A - the matrix
8960: - x - the input dense matrix
8962: Output Parameter:
8963: . y - the output dense matrix
8965: Level: intermediate
8967: Note:
8968: This allows one to use either the restriction or interpolation (its transpose)
8969: matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
8970: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8972: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8973: @*/
PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
{
  PetscFunctionBegin;
  /* restriction is the same shape-dispatched product, with no additive term */
  PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
  PetscFunctionReturn(PETSC_SUCCESS);
}
8981: /*@
8982: MatGetNullSpace - retrieves the null space of a matrix.
8984: Logically Collective
8986: Input Parameters:
8987: + mat - the matrix
8988: - nullsp - the null space object
8990: Level: developer
8992: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8993: @*/
8994: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8995: {
8996: PetscFunctionBegin;
8998: PetscAssertPointer(nullsp, 2);
8999: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
9000: PetscFunctionReturn(PETSC_SUCCESS);
9001: }
9003: /*@C
9004: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
9006: Logically Collective
9008: Input Parameters:
9009: + n - the number of matrices
9010: - mat - the array of matrices
9012: Output Parameters:
9013: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space, length 3 * `n`
9015: Level: developer
9017: Note:
9018: Call `MatRestoreNullSpaces()` to provide these to another array of matrices
9020: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9021: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
9022: @*/
9023: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9024: {
9025: PetscFunctionBegin;
9026: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9027: PetscAssertPointer(mat, 2);
9028: PetscAssertPointer(nullsp, 3);
9030: PetscCall(PetscCalloc1(3 * n, nullsp));
9031: for (PetscInt i = 0; i < n; i++) {
9033: (*nullsp)[i] = mat[i]->nullsp;
9034: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
9035: (*nullsp)[n + i] = mat[i]->nearnullsp;
9036: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
9037: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
9038: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
9039: }
9040: PetscFunctionReturn(PETSC_SUCCESS);
9041: }
9043: /*@C
9044: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
9046: Logically Collective
9048: Input Parameters:
9049: + n - the number of matrices
9050: . mat - the array of matrices
9051: - nullsp - an array of null spaces
9053: Level: developer
9055: Note:
9056: Call `MatGetNullSpaces()` to create `nullsp`
9058: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9059: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
9060: @*/
9061: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9062: {
9063: PetscFunctionBegin;
9064: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9065: PetscAssertPointer(mat, 2);
9066: PetscAssertPointer(nullsp, 3);
9067: PetscAssertPointer(*nullsp, 3);
9069: for (PetscInt i = 0; i < n; i++) {
9071: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
9072: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
9073: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
9074: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
9075: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
9076: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
9077: }
9078: PetscCall(PetscFree(*nullsp));
9079: PetscFunctionReturn(PETSC_SUCCESS);
9080: }
9082: /*@
9083: MatSetNullSpace - attaches a null space to a matrix.
9085: Logically Collective
9087: Input Parameters:
9088: + mat - the matrix
9089: - nullsp - the null space object
9091: Level: advanced
9093: Notes:
9094: This null space is used by the `KSP` linear solvers to solve singular systems.
9096: Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with a `nullsp` of `NULL`
9098: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
9099: to zero but the linear system will still be solved in a least squares sense.
9101: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
9102: the domain of a matrix A (from $R^n$ to $R^m$ (m rows, n columns) $R^n$ = the direct sum of the null space of A, n(A), + the range of $A^T$, $R(A^T)$.
9103: Similarly $R^m$ = direct sum n($A^T$) + R(A). Hence the linear system $A x = b$ has a solution only if b in R(A) (or correspondingly b is orthogonal to
9104: n($A^T$)) and if x is a solution then x + alpha n(A) is a solution for any alpha. The minimum norm solution is orthogonal to n(A). For problems without a solution
9105: the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving A x = \hat{b} where \hat{b} is b orthogonalized to the n($A^T$).
9106: This \hat{b} can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
9108: If the matrix is known to be symmetric because it is an `MATSBAIJ` matrix or one as called
9109: `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`); this
9110: routine also automatically calls `MatSetTransposeNullSpace()`.
9112: The user should call `MatNullSpaceDestroy()`.
9114: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9115: `KSPSetPCSide()`
9116: @*/
PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
{
  PetscFunctionBegin;
  /* reference the new space BEFORE destroying the old one, so passing the
     currently attached null space back in is safe */
  if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
  PetscCall(MatNullSpaceDestroy(&mat->nullsp));
  mat->nullsp = nullsp;
  /* a symmetric matrix has the same null space as its transpose */
  if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9129: /*@
9130: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
9132: Logically Collective
9134: Input Parameters:
9135: + mat - the matrix
9136: - nullsp - the null space object
9138: Level: developer
9140: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9141: @*/
9142: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
9143: {
9144: PetscFunctionBegin;
9147: PetscAssertPointer(nullsp, 2);
9148: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
9149: PetscFunctionReturn(PETSC_SUCCESS);
9150: }
9152: /*@
9153: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
9155: Logically Collective
9157: Input Parameters:
9158: + mat - the matrix
9159: - nullsp - the null space object
9161: Level: advanced
9163: Notes:
9164: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9166: See `MatSetNullSpace()`
9168: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9169: @*/
PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
{
  PetscFunctionBegin;
  /* reference the new space before destroying the old one, so re-setting the
     currently attached space is safe */
  if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
  PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
  mat->transnullsp = nullsp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9181: /*@
9182: MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions
9183: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9185: Logically Collective
9187: Input Parameters:
9188: + mat - the matrix
9189: - nullsp - the null space object
9191: Level: advanced
9193: Notes:
9194: Overwrites any previous near null space that may have been attached
9196: You can remove the null space by calling this routine with an `nullsp` of `NULL`
9198: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9199: @*/
PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
{
  PetscFunctionBegin;
  MatCheckPreallocated(mat, 1);
  /* reference the new space before destroying the old one, so re-setting the
     currently attached space is safe; nullsp == NULL removes it */
  if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
  PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
  mat->nearnullsp = nullsp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9213: /*@
9214: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9216: Not Collective
9218: Input Parameter:
9219: . mat - the matrix
9221: Output Parameter:
9222: . nullsp - the null space object, `NULL` if not set
9224: Level: advanced
9226: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9227: @*/
PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
{
  PetscFunctionBegin;
  PetscAssertPointer(nullsp, 2);
  MatCheckPreallocated(mat, 1);
  /* NULL if no near null space has been attached */
  *nullsp = mat->nearnullsp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9239: /*@
9240: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9242: Collective
9244: Input Parameters:
9245: + mat - the matrix
9246: . row - row/column permutation
9247: - info - information on desired factorization process
9249: Level: developer
9251: Notes:
9252: Probably really in-place only when level of fill is zero, otherwise allocates
9253: new space to store factored matrix and deletes previous memory.
9255: Most users should employ the `KSP` interface for linear solvers
9256: instead of working directly with matrix algebra routines such as this.
9257: See, e.g., `KSPCreate()`.
9259: Developer Note:
9260: The Fortran interface is not autogenerated as the
9261: interface definition cannot be generated correctly [due to `MatFactorInfo`]
9263: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9264: @*/
PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscAssertPointer(info, 3);
  /* ICC requires a square, assembled, not-yet-factored matrix */
  PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* dispatch to the type-specific in-place ICC implementation */
  PetscUseTypeMethod(mat, iccfactor, row, info);
  /* values were changed in place */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9281: /*@
9282: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9283: ghosted ones.
9285: Not Collective
9287: Input Parameters:
9288: + mat - the matrix
9289: - diag - the diagonal values, including ghost ones
9291: Level: developer
9293: Notes:
9294: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9296: This allows one to avoid during communication to perform the scaling that must be done with `MatDiagonalScale()`
9298: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9299: @*/
9300: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9301: {
9302: PetscMPIInt size;
9304: PetscFunctionBegin;
9309: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9310: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9311: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9312: if (size == 1) {
9313: PetscInt n, m;
9314: PetscCall(VecGetSize(diag, &n));
9315: PetscCall(MatGetSize(mat, NULL, &m));
9316: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9317: PetscCall(MatDiagonalScale(mat, NULL, diag));
9318: } else {
9319: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9320: }
9321: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9322: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9323: PetscFunctionReturn(PETSC_SUCCESS);
9324: }
9326: /*@
9327: MatGetInertia - Gets the inertia from a factored matrix
9329: Collective
9331: Input Parameter:
9332: . mat - the matrix
9334: Output Parameters:
9335: + nneg - number of negative eigenvalues
9336: . nzero - number of zero eigenvalues
9337: - npos - number of positive eigenvalues
9339: Level: advanced
9341: Note:
9342: Matrix must have been factored by `MatCholeskyFactor()`
9344: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9345: @*/
PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
{
  PetscFunctionBegin;
  /* the inertia is only defined for an assembled, factored matrix */
  PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
  PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
  PetscFunctionReturn(PETSC_SUCCESS);
}
9357: /*@C
9358: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9360: Neighbor-wise Collective
9362: Input Parameters:
9363: + mat - the factored matrix obtained with `MatGetFactor()`
9364: - b - the right-hand-side vectors
9366: Output Parameter:
9367: . x - the result vectors
9369: Level: developer
9371: Note:
9372: The vectors `b` and `x` cannot be the same. I.e., one cannot
9373: call `MatSolves`(A,x,x).
9375: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9376: @*/
PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
{
  PetscFunctionBegin;
  /* in-place solves (x aliasing b) are not supported */
  PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
  PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
  /* a 0x0 matrix has nothing to solve */
  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);

  MatCheckPreallocated(mat, 1);
  PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, solves, b, x);
  PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9393: /*@
9394: MatIsSymmetric - Test whether a matrix is symmetric
9396: Collective
9398: Input Parameters:
9399: + A - the matrix to test
9400: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9402: Output Parameter:
9403: . flg - the result
9405: Level: intermediate
9407: Notes:
9408: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9410: If the matrix does not yet know if it is symmetric or not this can be an expensive operation, also available `MatIsSymmetricKnown()`
9412: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9413: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9415: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9416: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9417: @*/
9418: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9419: {
9420: PetscFunctionBegin;
9422: PetscAssertPointer(flg, 3);
9423: if (A->symmetric != PETSC_BOOL3_UNKNOWN) *flg = PetscBool3ToBool(A->symmetric);
9424: else {
9425: if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9426: else PetscCall(MatIsTranspose(A, A, tol, flg));
9427: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9428: }
9429: PetscFunctionReturn(PETSC_SUCCESS);
9430: }
9432: /*@
9433: MatIsHermitian - Test whether a matrix is Hermitian
9435: Collective
9437: Input Parameters:
9438: + A - the matrix to test
9439: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9441: Output Parameter:
9442: . flg - the result
9444: Level: intermediate
9446: Notes:
9447: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9449: If the matrix does not yet know if it is Hermitian or not this can be an expensive operation, also available `MatIsHermitianKnown()`
9451: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9452: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9454: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9455: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9456: @*/
9457: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9458: {
9459: PetscFunctionBegin;
9461: PetscAssertPointer(flg, 3);
9462: if (A->hermitian != PETSC_BOOL3_UNKNOWN) *flg = PetscBool3ToBool(A->hermitian);
9463: else {
9464: if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9465: else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9466: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9467: }
9468: PetscFunctionReturn(PETSC_SUCCESS);
9469: }
9471: /*@
9472: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9474: Not Collective
9476: Input Parameter:
9477: . A - the matrix to check
9479: Output Parameters:
9480: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9481: - flg - the result (only valid if set is `PETSC_TRUE`)
9483: Level: advanced
9485: Notes:
9486: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9487: if you want it explicitly checked
9489: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9490: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9492: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9493: @*/
9494: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9495: {
9496: PetscFunctionBegin;
9498: PetscAssertPointer(set, 2);
9499: PetscAssertPointer(flg, 3);
9500: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9501: *set = PETSC_TRUE;
9502: *flg = PetscBool3ToBool(A->symmetric);
9503: } else {
9504: *set = PETSC_FALSE;
9505: }
9506: PetscFunctionReturn(PETSC_SUCCESS);
9507: }
9509: /*@
9510: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9512: Not Collective
9514: Input Parameter:
9515: . A - the matrix to check
9517: Output Parameters:
9518: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9519: - flg - the result (only valid if set is `PETSC_TRUE`)
9521: Level: advanced
9523: Notes:
9524: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9526: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9527: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9529: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9530: @*/
9531: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9532: {
9533: PetscFunctionBegin;
9535: PetscAssertPointer(set, 2);
9536: PetscAssertPointer(flg, 3);
9537: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9538: *set = PETSC_TRUE;
9539: *flg = PetscBool3ToBool(A->spd);
9540: } else {
9541: *set = PETSC_FALSE;
9542: }
9543: PetscFunctionReturn(PETSC_SUCCESS);
9544: }
9546: /*@
9547: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9549: Not Collective
9551: Input Parameter:
9552: . A - the matrix to check
9554: Output Parameters:
9555: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9556: - flg - the result (only valid if set is `PETSC_TRUE`)
9558: Level: advanced
9560: Notes:
9561: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9562: if you want it explicitly checked
9564: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9565: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9567: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9568: @*/
9569: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9570: {
9571: PetscFunctionBegin;
9573: PetscAssertPointer(set, 2);
9574: PetscAssertPointer(flg, 3);
9575: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9576: *set = PETSC_TRUE;
9577: *flg = PetscBool3ToBool(A->hermitian);
9578: } else {
9579: *set = PETSC_FALSE;
9580: }
9581: PetscFunctionReturn(PETSC_SUCCESS);
9582: }
9584: /*@
9585: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9587: Collective
9589: Input Parameter:
9590: . A - the matrix to test
9592: Output Parameter:
9593: . flg - the result
9595: Level: intermediate
9597: Notes:
9598: If the matrix does not yet know whether it is structurally symmetric, this can be an expensive operation; `MatIsStructurallySymmetricKnown()` is also available
9600: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9601: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9603: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9604: @*/
PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscAssertPointer(flg, 2);
  if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
    /* Answer already cached (from a previous test or a MatSetOption() declaration); return it. */
    *flg = PetscBool3ToBool(A->structurally_symmetric);
  } else {
    /* Run the (potentially expensive) type-specific structural test ... */
    PetscUseTypeMethod(A, isstructurallysymmetric, flg);
    /* ... and cache the outcome so later queries are cheap. */
    PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
9619: /*@
9620: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9622: Not Collective
9624: Input Parameter:
9625: . A - the matrix to check
9627: Output Parameters:
9628: + set - PETSC_TRUE if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9629: - flg - the result (only valid if set is PETSC_TRUE)
9631: Level: advanced
9633: Notes:
9634: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9635: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9637: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9639: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9640: @*/
9641: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9642: {
9643: PetscFunctionBegin;
9645: PetscAssertPointer(set, 2);
9646: PetscAssertPointer(flg, 3);
9647: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9648: *set = PETSC_TRUE;
9649: *flg = PetscBool3ToBool(A->structurally_symmetric);
9650: } else {
9651: *set = PETSC_FALSE;
9652: }
9653: PetscFunctionReturn(PETSC_SUCCESS);
9654: }
9656: /*@
9657: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9658: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9660: Not Collective
9662: Input Parameter:
9663: . mat - the matrix
9665: Output Parameters:
9666: + nstash - the size of the stash
9667: . reallocs - the number of additional mallocs incurred.
9668: . bnstash - the size of the block stash
9669: - breallocs - the number of additional mallocs incurred in the block stash
9671: Level: advanced
9673: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9674: @*/
PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
{
  PetscFunctionBegin;
  /* Query the scalar stash and the block stash independently; each call fills
     its (size, reallocation-count) output pair. */
  PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
  PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9683: /*@
9684: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9685: parallel layout, `PetscLayout` for rows and columns
9687: Collective
9689: Input Parameter:
9690: . mat - the matrix
9692: Output Parameters:
9693: + right - (optional) vector that the matrix can be multiplied against
9694: - left - (optional) vector that the matrix vector product can be stored in
9696: Level: advanced
9698: Notes:
9699: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9701: These are new vectors which are not owned by the mat, they should be destroyed in `VecDestroy()` when no longer needed
9703: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9704: @*/
PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
{
  PetscFunctionBegin;
  if (mat->ops->getvecs) {
    /* The matrix type provides its own vector-creation routine; defer to it. */
    PetscUseTypeMethod(mat, getvecs, right, left);
  } else {
    /* Generic fallback: build vectors directly from the matrix layouts.
       The right vector shares the column layout, the left vector the row layout. */
    if (right) {
      PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
      PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
      PetscCall(VecSetType(*right, mat->defaultvectype));
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
      /* If the matrix is bound to the CPU and propagates that binding, make
         the new vector inherit both the binding and the propagation flag. */
      if (mat->boundtocpu && mat->bindingpropagates) {
        PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
        PetscCall(VecBindToCPU(*right, PETSC_TRUE));
      }
#endif
    }
    if (left) {
      PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
      PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
      PetscCall(VecSetType(*left, mat->defaultvectype));
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
      /* Same CPU-binding propagation as for the right vector. */
      if (mat->boundtocpu && mat->bindingpropagates) {
        PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
        PetscCall(VecBindToCPU(*left, PETSC_TRUE));
      }
#endif
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
9739: /*@
9740: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9741: with default values.
9743: Not Collective
9745: Input Parameter:
9746: . info - the `MatFactorInfo` data structure
9748: Level: developer
9750: Notes:
9751: The solvers are generally used through the `KSP` and `PC` objects, for example
9752: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9754: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
9756: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9757: @*/
PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* Default initialization is an all-zero struct; callers then override
     individual entries as needed for the particular factorization. */
  PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9765: /*@
9766: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9768: Collective
9770: Input Parameters:
9771: + mat - the factored matrix
9772: - is - the index set defining the Schur indices (0-based)
9774: Level: advanced
9776: Notes:
9777: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9779: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9781: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9783: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9784: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9785: @*/
PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
{
  PetscErrorCode (*f)(Mat, IS);

  PetscFunctionBegin;
  PetscCheckSameComm(mat, 1, is, 2);
  /* Only factored matrices (MatGetFactor() products) can carry a Schur complement. */
  PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Only for factored matrix") /* see note below on error code */;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9805: /*@
9806: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9808: Logically Collective
9810: Input Parameters:
9811: + F - the factored matrix obtained by calling `MatGetFactor()`
9812: . S - location where to return the Schur complement, can be `NULL`
9813: - status - the status of the Schur complement matrix, can be `NULL`
9815: Level: advanced
9817: Notes:
9818: You must call `MatFactorSetSchurIS()` before calling this routine.
9820: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9822: The routine provides a copy of the Schur matrix stored within the solver data structures.
9823: The caller must destroy the object when it is no longer needed.
9824: If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.
9826: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9828: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9830: Developer Note:
9831: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9832: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9834: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9835: @*/
PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
{
  PetscFunctionBegin;
  if (S) PetscAssertPointer(S, 2);
  if (status) PetscAssertPointer(status, 3);
  if (S) {
    PetscErrorCode (*f)(Mat, Mat *);

    /* Prefer the solver-specific copy routine if the package registered one;
       otherwise fall back to duplicating the internally stored Schur matrix. */
    PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
    if (f) {
      PetscCall((*f)(F, S));
    } else {
      PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
    }
  }
  /* Report the current state (unfactored/factored/inverted) alongside the copy. */
  if (status) *status = F->schur_status;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9856: /*@
9857: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9859: Logically Collective
9861: Input Parameters:
9862: + F - the factored matrix obtained by calling `MatGetFactor()`
9863: . S - location where to return the Schur complement, can be `NULL`
9864: - status - the status of the Schur complement matrix, can be `NULL`
9866: Level: advanced
9868: Notes:
9869: You must call `MatFactorSetSchurIS()` before calling this routine.
9871: Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`
9873: The routine returns the Schur complement stored within the data structures of the solver.
9875: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9877: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9879: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9881: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9883: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9884: @*/
9885: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9886: {
9887: PetscFunctionBegin;
9889: if (S) {
9890: PetscAssertPointer(S, 2);
9891: *S = F->schur;
9892: }
9893: if (status) {
9894: PetscAssertPointer(status, 3);
9895: *status = F->schur_status;
9896: }
9897: PetscFunctionReturn(PETSC_SUCCESS);
9898: }
/* Reset the factorization bookkeeping on the internal Schur matrix so its state
   matches F->schur_status. Called after the status is (re)assigned. */
static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
{
  Mat S = F->schur;

  PetscFunctionBegin;
  switch (F->schur_status) {
  case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
  case MAT_FACTOR_SCHUR_INVERTED:
    /* In both states S no longer represents a factored matrix: strip the
       solve function pointers and the factorization metadata. */
    if (S) {
      S->ops->solve             = NULL;
      S->ops->matsolve          = NULL;
      S->ops->solvetranspose    = NULL;
      S->ops->matsolvetranspose = NULL;
      S->ops->solveadd          = NULL;
      S->ops->solvetransposeadd = NULL;
      S->factortype             = MAT_FACTOR_NONE;
      PetscCall(PetscFree(S->solvertype));
    }
  case MAT_FACTOR_SCHUR_FACTORED: // fall-through
    /* Nothing to do when factored; the fallthrough above is intentional. */
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
9926: /*@
9927: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9929: Logically Collective
9931: Input Parameters:
9932: + F - the factored matrix obtained by calling `MatGetFactor()`
9933: . S - location where the Schur complement is stored
9934: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9936: Level: advanced
9938: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9939: @*/
PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
{
  PetscFunctionBegin;
  if (S) {
    /* Invalidate the caller's reference; ownership stays with the factored matrix. */
    *S = NULL;
  }
  /* Record the (possibly changed) state and bring the internal Schur matrix
     bookkeeping back in sync with it. */
  F->schur_status = status;
  PetscCall(MatFactorUpdateSchurStatus_Private(F));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9953: /*@
9954: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9956: Logically Collective
9958: Input Parameters:
9959: + F - the factored matrix obtained by calling `MatGetFactor()`
9960: . rhs - location where the right-hand side of the Schur complement system is stored
9961: - sol - location where the solution of the Schur complement system has to be returned
9963: Level: advanced
9965: Notes:
9966: The sizes of the vectors should match the size of the Schur complement
9968: Must be called after `MatFactorSetSchurIS()`
9970: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9971: @*/
PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
{
  PetscFunctionBegin;
  PetscCheckSameComm(F, 1, rhs, 2);
  PetscCheckSameComm(F, 1, sol, 3);
  /* Make sure the Schur complement is factored (no-op when already factored or inverted). */
  PetscCall(MatFactorFactorizeSchurComplement(F));
  switch (F->schur_status) {
  case MAT_FACTOR_SCHUR_FACTORED:
    /* Factored form: perform a transpose triangular solve. */
    PetscCall(MatSolveTranspose(F->schur, rhs, sol));
    break;
  case MAT_FACTOR_SCHUR_INVERTED:
    /* Inverse is stored explicitly: the solve is a transpose matrix-vector product. */
    PetscCall(MatMultTranspose(F->schur, rhs, sol));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
9997: /*@
9998: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
10000: Logically Collective
10002: Input Parameters:
10003: + F - the factored matrix obtained by calling `MatGetFactor()`
10004: . rhs - location where the right-hand side of the Schur complement system is stored
10005: - sol - location where the solution of the Schur complement system has to be returned
10007: Level: advanced
10009: Notes:
10010: The sizes of the vectors should match the size of the Schur complement
10012: Must be called after `MatFactorSetSchurIS()`
10014: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
10015: @*/
PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
{
  PetscFunctionBegin;
  PetscCheckSameComm(F, 1, rhs, 2);
  PetscCheckSameComm(F, 1, sol, 3);
  /* Ensure a usable (factored or inverted) Schur complement exists. */
  PetscCall(MatFactorFactorizeSchurComplement(F));
  switch (F->schur_status) {
  case MAT_FACTOR_SCHUR_FACTORED:
    /* Factored form: triangular solve with the stored factors. */
    PetscCall(MatSolve(F->schur, rhs, sol));
    break;
  case MAT_FACTOR_SCHUR_INVERTED:
    /* Explicit inverse available: solving reduces to a matrix-vector product. */
    PetscCall(MatMult(F->schur, rhs, sol));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
10041: PETSC_EXTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
10042: #if PetscDefined(HAVE_CUDA)
10043: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
10044: #endif
10046: /* Schur status updated in the interface */
/* Invert the (already factored) internal Schur matrix in place.
   Schur status updated in the interface */
static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
{
  Mat S = F->schur;

  PetscFunctionBegin;
  if (S) {
    PetscMPIInt size;
    PetscBool   isdense, isdensecuda;

    /* Inversion is only implemented for sequential dense (CPU or CUDA) Schur matrices. */
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
    PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
    PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
    PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
    PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
    PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
    if (isdense) {
      PetscCall(MatSeqDenseInvertFactors_Private(S));
    } else if (isdensecuda) {
#if defined(PETSC_HAVE_CUDA)
      PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
#endif
    }
    // TODO: no MATSEQDENSEHIP branch exists yet; HIP support needs a hip analogue of the CUDA path
    PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
10075: /*@
10076: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
10078: Logically Collective
10080: Input Parameter:
10081: . F - the factored matrix obtained by calling `MatGetFactor()`
10083: Level: advanced
10085: Notes:
10086: Must be called after `MatFactorSetSchurIS()`.
10088: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
10090: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
10091: @*/
PetscErrorCode MatFactorInvertSchurComplement(Mat F)
{
  PetscFunctionBegin;
  /* Already inverted: nothing to do. */
  if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
  /* Factor first (no-op if already factored), then invert the factors in place. */
  PetscCall(MatFactorFactorizeSchurComplement(F));
  PetscCall(MatFactorInvertSchurComplement_Private(F));
  F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
  PetscFunctionReturn(PETSC_SUCCESS);
}
10104: /*@
10105: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
10107: Logically Collective
10109: Input Parameter:
10110: . F - the factored matrix obtained by calling `MatGetFactor()`
10112: Level: advanced
10114: Note:
10115: Must be called after `MatFactorSetSchurIS()`
10117: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
10118: @*/
PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
{
  MatFactorInfo info;

  PetscFunctionBegin;
  /* Nothing to do if the Schur complement is already factored or inverted. */
  if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
  /* Use default factorization options (all-zero MatFactorInfo). */
  PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
  if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
    PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
  } else {
    PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
  }
  PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
  F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
  PetscFunctionReturn(PETSC_SUCCESS);
}
10139: /*@
10140: MatPtAP - Creates the matrix product $C = P^T * A * P$
10142: Neighbor-wise Collective
10144: Input Parameters:
10145: + A - the matrix
10146: . P - the projection matrix
10147: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10148: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DEFAULT` if you do not have a good estimate
10149: if the result is a dense matrix this is irrelevant
10151: Output Parameter:
10152: . C - the product matrix
10154: Level: intermediate
10156: Notes:
10157: C will be created and must be destroyed by the user with `MatDestroy()`.
10159: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10161: Developer Note:
10162: For matrix types without special implementation the function fallbacks to `MatMatMult()` followed by `MatTransposeMatMult()`.
10164: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10165: @*/
PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
{
  PetscFunctionBegin;
  /* On reuse, C must be a product matrix from a previous MAT_INITIAL_MATRIX call. */
  if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
  PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");

  if (scall == MAT_INITIAL_MATRIX) {
    /* Set up a new MATPRODUCT_PtAP product object and run the symbolic phase. */
    PetscCall(MatProductCreate(A, P, NULL, C));
    PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
    PetscCall(MatProductSetAlgorithm(*C, "default"));
    PetscCall(MatProductSetFill(*C, fill));

    (*C)->product->api_user = PETSC_TRUE;
    PetscCall(MatProductSetFromOptions(*C));
    PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
    PetscCall(MatProductSymbolic(*C));
  } else { /* scall == MAT_REUSE_MATRIX */
    /* Same sparsity pattern assumed; just swap in the (possibly updated) operands. */
    PetscCall(MatProductReplaceMats(A, P, NULL, *C));
  }

  PetscCall(MatProductNumeric(*C));
  /* P^T A P inherits symmetry/SPD-ness from A. */
  (*C)->symmetric = A->symmetric;
  (*C)->spd       = A->spd;
  PetscFunctionReturn(PETSC_SUCCESS);
}
10192: /*@
10193: MatRARt - Creates the matrix product $C = R * A * R^T$
10195: Neighbor-wise Collective
10197: Input Parameters:
10198: + A - the matrix
10199: . R - the projection matrix
10200: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10201: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DEFAULT` if you do not have a good estimate
10202: if the result is a dense matrix this is irrelevant
10204: Output Parameter:
10205: . C - the product matrix
10207: Level: intermediate
10209: Notes:
10210: C will be created and must be destroyed by the user with `MatDestroy()`.
10212: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10214: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10215: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10216: parallel MatRARt is implemented via explicit transpose of R, which could be very expensive.
10217: We recommend using MatPtAP().
10219: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10220: @*/
PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
{
  PetscFunctionBegin;
  /* On reuse, C must be a product matrix from a previous MAT_INITIAL_MATRIX call. */
  if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
  PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");

  if (scall == MAT_INITIAL_MATRIX) {
    /* Set up a new MATPRODUCT_RARt product object and run the symbolic phase. */
    PetscCall(MatProductCreate(A, R, NULL, C));
    PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
    PetscCall(MatProductSetAlgorithm(*C, "default"));
    PetscCall(MatProductSetFill(*C, fill));

    (*C)->product->api_user = PETSC_TRUE;
    PetscCall(MatProductSetFromOptions(*C));
    PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
    PetscCall(MatProductSymbolic(*C));
  } else { /* scall == MAT_REUSE_MATRIX */
    PetscCall(MatProductReplaceMats(A, R, NULL, *C));
  }

  PetscCall(MatProductNumeric(*C));
  /* R A R^T is symmetric whenever A is known symmetric. */
  if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Shared driver behind MatMatMult()/MatMatTransposeMult()/etc.: creates or reuses
   a product matrix C of the given MatProductType and runs symbolic/numeric phases.
   flg tracks whether the symbolic phase must (re)run for this call. */
static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
{
  PetscBool flg = PETSC_TRUE;

  PetscFunctionBegin;
  PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
    PetscCall(MatProductCreate(A, B, NULL, C));
    PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
    PetscCall(MatProductSetFill(*C, fill));
  } else { /* scall == MAT_REUSE_MATRIX */
    Mat_Product *product = (*C)->product;

    /* Dense C may be reused across different product types; clear a stale product. */
    PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
    if (flg && product && product->type != ptype) {
      PetscCall(MatProductClear(*C));
      product = NULL;
    }
    PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
    if (!product) { /* user provide the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
      /* Only allowed for dense C (flg); attach a fresh product to the user's matrix. */
      PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
      PetscCall(MatProductCreate_Private(A, B, NULL, *C));
      product        = (*C)->product;
      product->fill  = fill;
      product->clear = PETSC_TRUE;
    } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
      /* Existing product with matching type: skip the symbolic phase below. */
      flg = PETSC_FALSE;
      PetscCall(MatProductReplaceMats(A, B, NULL, *C));
    }
  }
  if (flg) {
    /* New or retyped product: configure it and run the symbolic phase. */
    (*C)->product->api_user = PETSC_TRUE;
    PetscCall(MatProductSetType(*C, ptype));
    PetscCall(MatProductSetFromOptions(*C));
    PetscCall(MatProductSymbolic(*C));
  }
  PetscCall(MatProductNumeric(*C));
  PetscFunctionReturn(PETSC_SUCCESS);
}
10287: /*@
10288: MatMatMult - Performs matrix-matrix multiplication C=A*B.
10290: Neighbor-wise Collective
10292: Input Parameters:
10293: + A - the left matrix
10294: . B - the right matrix
10295: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10296: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if you do not have a good estimate
10297: if the result is a dense matrix this is irrelevant
10299: Output Parameter:
10300: . C - the product matrix
10302: Notes:
10303: Unless scall is `MAT_REUSE_MATRIX` C will be created.
10305: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10306: call to this function with `MAT_INITIAL_MATRIX`.
10308: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value actually needed.
10310: In the special case where matrix B (and hence C) are dense you can create the correctly sized matrix C yourself and then call this routine with `MAT_REUSE_MATRIX`,
10311: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix C is sparse.
10313: Example of Usage:
10314: .vb
10315: MatProductCreate(A,B,NULL,&C);
10316: MatProductSetType(C,MATPRODUCT_AB);
10317: MatProductSymbolic(C);
10318: MatProductNumeric(C); // compute C=A * B
10319: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10320: MatProductNumeric(C);
10321: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10322: MatProductNumeric(C);
10323: .ve
10325: Level: intermediate
10327: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10328: @*/
PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
{
  PetscFunctionBegin;
  /* Thin wrapper over the shared product driver with product type AB (C = A*B). */
  PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
  PetscFunctionReturn(PETSC_SUCCESS);
}
10336: /*@
10337: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10339: Neighbor-wise Collective
10341: Input Parameters:
10342: + A - the left matrix
10343: . B - the right matrix
10344: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10345: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if not known
10347: Output Parameter:
10348: . C - the product matrix
10350: Options Database Key:
10351: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10352: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10353: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10355: Level: intermediate
10357: Notes:
10358: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10360: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10362: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10363: actually needed.
10365: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10366: and for pairs of `MATMPIDENSE` matrices.
10368: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`
10370: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10371: @*/
PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
{
  PetscFunctionBegin;
  /* Delegate C = A*B^T to the shared product driver */
  PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
  /* A*A^T is symmetric by construction, so record that property on the result */
  if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
10380: /*@
10381: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10383: Neighbor-wise Collective
10385: Input Parameters:
10386: + A - the left matrix
10387: . B - the right matrix
10388: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10389: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if not known
10391: Output Parameter:
10392: . C - the product matrix
10394: Level: intermediate
10396: Notes:
10397: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10399: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
10401: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`
10403: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10404: actually needed.
10406: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10407: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10409: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10410: @*/
PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
{
  PetscFunctionBegin;
  /* Thin wrapper: C = A^T*B is computed entirely by the shared product driver */
  PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
  PetscFunctionReturn(PETSC_SUCCESS);
}
10418: /*@
10419: MatMatMatMult - Performs matrix-matrix-matrix multiplication D=A*B*C.
10421: Neighbor-wise Collective
10423: Input Parameters:
10424: + A - the left matrix
10425: . B - the middle matrix
10426: . C - the right matrix
10427: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10428: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DEFAULT` if you do not have a good estimate
10429: if the result is a dense matrix this is irrelevant
10431: Output Parameter:
10432: . D - the product matrix
10434: Level: intermediate
10436: Notes:
10437: Unless `scall` is `MAT_REUSE_MATRIX` `D` will be created.
10439: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10441: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`
10443: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10444: actually needed.
10446: If you have many matrices with the same non-zero structure to multiply, you
10447: should use `MAT_REUSE_MATRIX` in all calls but the first
10449: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10450: @*/
PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
{
  PetscFunctionBegin;
  /* when reusing, D must already be a product matrix created by a previous MAT_INITIAL_MATRIX call */
  if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
  PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");

  if (scall == MAT_INITIAL_MATRIX) {
    /* first call: build the triple-product object and run the symbolic phase */
    PetscCall(MatProductCreate(A, B, C, D));
    PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
    PetscCall(MatProductSetAlgorithm(*D, "default"));
    PetscCall(MatProductSetFill(*D, fill));

    /* record that this product was requested through the user-level API (field consumed elsewhere) */
    (*D)->product->api_user = PETSC_TRUE;
    PetscCall(MatProductSetFromOptions(*D));
    /* fail cleanly if no implementation supports this type combination */
    PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
               ((PetscObject)C)->type_name);
    PetscCall(MatProductSymbolic(*D));
  } else { /* user may change input matrices when REUSE */
    PetscCall(MatProductReplaceMats(A, B, C, *D));
  }
  /* always (re)compute the numeric values */
  PetscCall(MatProductNumeric(*D));
  PetscFunctionReturn(PETSC_SUCCESS);
}
10475: /*@
10476: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10478: Collective
10480: Input Parameters:
10481: + mat - the matrix
10482: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10483: . subcomm - MPI communicator split from the communicator where mat resides in (or `MPI_COMM_NULL` if nsubcomm is used)
10484: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10486: Output Parameter:
10487: . matredundant - redundant matrix
10489: Level: advanced
10491: Notes:
10492: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10493: original matrix has not changed from that last call to `MatCreateRedundantMatrix()`.
10495: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10496: calling it.
10498: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
10500: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10501: @*/
PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
{
  MPI_Comm       comm;
  PetscMPIInt    size;
  PetscInt       mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
  Mat_Redundant *redund     = NULL;
  PetscSubcomm   psubcomm   = NULL;
  MPI_Comm       subcomm_in = subcomm;
  Mat           *matseq;
  IS             isrow, iscol;
  PetscBool      newsubcomm = PETSC_FALSE;

  PetscFunctionBegin;
  /* when reusing, the output must already point to a valid matrix */
  if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
    PetscAssertPointer(*matredundant, 5);
  }

  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  if (size == 1 || nsubcomm == 1) {
    /* trivial case: the "redundant" matrix is just a copy of mat */
    if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
    } else {
      PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
      PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
  if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
    /* create psubcomm, then get subcomm */
    PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must between 1 and %d", size);

    PetscCall(PetscSubcommCreate(comm, &psubcomm));
    PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
    PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
    PetscCall(PetscSubcommSetFromOptions(psubcomm));
    /* duplicate the child communicator so it outlives psubcomm, which is destroyed immediately below */
    PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
    newsubcomm = PETSC_TRUE;
    PetscCall(PetscSubcommDestroy(&psubcomm));
  }

  /* get isrow, iscol and a local sequential matrix matseq[0] */
  if (reuse == MAT_INITIAL_MATRIX) {
    mloc_sub = PETSC_DECIDE;
    nloc_sub = PETSC_DECIDE;
    /* split row/column ownership over the subcomm, honoring the block size when one is set */
    if (bs < 1) {
      PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
      PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
    } else {
      PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
      PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
    }
    /* prefix sum of local row counts yields this rank's [rstart,rend) row range within the subcomm */
    PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
    rstart = rend - mloc_sub;
    PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
    PetscCall(ISSetIdentity(iscol));
  } else { /* reuse == MAT_REUSE_MATRIX */
    PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
    /* retrieve subcomm */
    PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
    /* reuse the index sets and sequential matrix cached on the previous result */
    redund = (*matredundant)->redundant;
    isrow  = redund->isrow;
    iscol  = redund->iscol;
    matseq = redund->matseq;
  }
  PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));

  /* get matredundant over subcomm */
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));

    /* create a supporting struct and attach it to C for reuse */
    PetscCall(PetscNew(&redund));
    (*matredundant)->redundant = redund;
    redund->isrow              = isrow;
    redund->iscol              = iscol;
    redund->matseq             = matseq;
    /* only remember subcomm when this routine duplicated it (so it can be freed later);
       a user-provided subcomm is not owned here */
    if (newsubcomm) {
      redund->subcomm = subcomm;
    } else {
      redund->subcomm = MPI_COMM_NULL;
    }
  } else {
    PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
  }
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
  /* propagate CPU binding from the sequential submatrix to the redundant matrix */
  if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
    PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
    PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
  }
#endif
  PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
10607: /*@C
10608: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10609: a given `Mat`. Each submatrix can span multiple procs.
10611: Collective
10613: Input Parameters:
10614: + mat - the matrix
10615: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10616: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10618: Output Parameter:
10619: . subMat - parallel sub-matrices each spanning a given `subcomm`
10621: Level: advanced
10623: Notes:
10624: The submatrix partition across processors is dictated by `subComm` a
10625:   communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10626: is not restricted to be grouped with consecutive original MPI processes.
10628: Due the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10629: map directly to the layout of the original matrix [wrt the local
10630: row,col partitioning]. So the original 'DiagonalMat' naturally maps
10631: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10632:   the `subMat`. However the off-diagonal matrix loses some columns - and this is
10633: reconstructed with `MatSetValues()`
10635: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10637: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10638: @*/
10639: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10640: {
10641: PetscMPIInt commsize, subCommSize;
10643: PetscFunctionBegin;
10644: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10645: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10646: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommZize %d", commsize, subCommSize);
10648: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10649: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10650: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10651: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10652: PetscFunctionReturn(PETSC_SUCCESS);
10653: }
10655: /*@
10656: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10658: Not Collective
10660: Input Parameters:
10661: + mat - matrix to extract local submatrix from
10662: . isrow - local row indices for submatrix
10663: - iscol - local column indices for submatrix
10665: Output Parameter:
10666: . submat - the submatrix
10668: Level: intermediate
10670: Notes:
10671: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10673: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10674:   the same as `mat`, it may be `PETSC_COMM_SELF`, or some other sub-communicator of `mat`'s.
10676: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10677: `MatSetValuesBlockedLocal()` will also be implemented.
10679: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10680: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
10682: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10683: @*/
PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
{
  PetscFunctionBegin;
  PetscCheckSameComm(isrow, 2, iscol, 3);
  PetscAssertPointer(submat, 4);
  /* local indexing only makes sense once a local-to-global mapping is attached */
  PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");

  /* prefer the type-specific implementation; otherwise fall back to a generic local reference */
  if (mat->ops->getlocalsubmatrix) {
    PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
  } else {
    PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
10702: /*@
10703: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10705: Not Collective
10707: Input Parameters:
10708: + mat - matrix to extract local submatrix from
10709: . isrow - local row indices for submatrix
10710: . iscol - local column indices for submatrix
10711: - submat - the submatrix
10713: Level: intermediate
10715: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10716: @*/
PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
{
  PetscFunctionBegin;
  PetscCheckSameComm(isrow, 2, iscol, 3);
  PetscAssertPointer(submat, 4);

  /* types without a dedicated restore simply destroy the reference handed out by MatGetLocalSubMatrix() */
  if (mat->ops->restorelocalsubmatrix) {
    PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
  } else {
    PetscCall(MatDestroy(submat));
  }
  *submat = NULL; /* the caller's handle is always invalidated */
  PetscFunctionReturn(PETSC_SUCCESS);
}
10736: /*@
10737: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10739: Collective
10741: Input Parameter:
10742: . mat - the matrix
10744: Output Parameter:
10745: . is - if any rows have zero diagonals this contains the list of them
10747: Level: developer
10749: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10750: @*/
10751: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10752: {
10753: PetscFunctionBegin;
10756: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10757: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10759: if (!mat->ops->findzerodiagonals) {
10760: Vec diag;
10761: const PetscScalar *a;
10762: PetscInt *rows;
10763: PetscInt rStart, rEnd, r, nrow = 0;
10765: PetscCall(MatCreateVecs(mat, &diag, NULL));
10766: PetscCall(MatGetDiagonal(mat, diag));
10767: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10768: PetscCall(VecGetArrayRead(diag, &a));
10769: for (r = 0; r < rEnd - rStart; ++r)
10770: if (a[r] == 0.0) ++nrow;
10771: PetscCall(PetscMalloc1(nrow, &rows));
10772: nrow = 0;
10773: for (r = 0; r < rEnd - rStart; ++r)
10774: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10775: PetscCall(VecRestoreArrayRead(diag, &a));
10776: PetscCall(VecDestroy(&diag));
10777: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10778: } else {
10779: PetscUseTypeMethod(mat, findzerodiagonals, is);
10780: }
10781: PetscFunctionReturn(PETSC_SUCCESS);
10782: }
10784: /*@
10785: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10787: Collective
10789: Input Parameter:
10790: . mat - the matrix
10792: Output Parameter:
10793: . is - contains the list of rows with off block diagonal entries
10795: Level: developer
10797: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10798: @*/
PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");

  /* no generic fallback: this errors cleanly if the matrix type does not implement the operation */
  PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
  PetscFunctionReturn(PETSC_SUCCESS);
}
10811: /*@C
10812: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10814: Collective; No Fortran Support
10816: Input Parameter:
10817: . mat - the matrix
10819: Output Parameter:
10820: . values - the block inverses in column major order (FORTRAN-like)
10822: Level: advanced
10824: Notes:
10825: The size of the blocks is determined by the block size of the matrix.
10827: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10829: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
10831: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10832: @*/
PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* the type-specific implementation supplies the array of block inverses;
     NOTE(review): values appears to alias storage owned by the matrix — confirm the caller must not free it */
  PetscUseTypeMethod(mat, invertblockdiagonal, values);
  PetscFunctionReturn(PETSC_SUCCESS);
}
10843: /*@
10844: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10846: Collective; No Fortran Support
10848: Input Parameters:
10849: + mat - the matrix
10850: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10851: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10853: Output Parameter:
10854: . values - the block inverses in column major order (FORTRAN-like)
10856: Level: advanced
10858: Notes:
10859: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10861: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10863: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10864: @*/
PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* delegated entirely to the matrix type; errors cleanly if the type does not implement it */
  PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
  PetscFunctionReturn(PETSC_SUCCESS);
}
10875: /*@
10876: MatInvertBlockDiagonalMat - set the values of matrix C to be the inverted block diagonal of matrix A
10878: Collective
10880: Input Parameters:
10881: + A - the matrix
10882: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10884: Level: advanced
10886: Note:
10887: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10889: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10890: @*/
10891: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10892: {
10893: const PetscScalar *vals;
10894: PetscInt *dnnz;
10895: PetscInt m, rstart, rend, bs, i, j;
10897: PetscFunctionBegin;
10898: PetscCall(MatInvertBlockDiagonal(A, &vals));
10899: PetscCall(MatGetBlockSize(A, &bs));
10900: PetscCall(MatGetLocalSize(A, &m, NULL));
10901: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10902: PetscCall(PetscMalloc1(m / bs, &dnnz));
10903: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10904: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10905: PetscCall(PetscFree(dnnz));
10906: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10907: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10908: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10909: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10910: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10911: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10912: PetscFunctionReturn(PETSC_SUCCESS);
10913: }
10915: /*@
10916: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10917: via `MatTransposeColoringCreate()`.
10919: Collective
10921: Input Parameter:
10922: . c - coloring context
10924: Level: intermediate
10926: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10927: @*/
10928: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10929: {
10930: MatTransposeColoring matcolor = *c;
10932: PetscFunctionBegin;
10933: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10934: if (--((PetscObject)matcolor)->refct > 0) {
10935: matcolor = NULL;
10936: PetscFunctionReturn(PETSC_SUCCESS);
10937: }
10939: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10940: PetscCall(PetscFree(matcolor->rows));
10941: PetscCall(PetscFree(matcolor->den2sp));
10942: PetscCall(PetscFree(matcolor->colorforcol));
10943: PetscCall(PetscFree(matcolor->columns));
10944: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10945: PetscCall(PetscHeaderDestroy(c));
10946: PetscFunctionReturn(PETSC_SUCCESS);
10947: }
10949: /*@
10950: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
10951: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
10952: `MatTransposeColoring` to sparse `B`.
10954: Collective
10956: Input Parameters:
10957: + coloring - coloring context created with `MatTransposeColoringCreate()`
10958: - B - sparse matrix
10960: Output Parameter:
10961: . Btdense - dense matrix $B^T$
10963: Level: developer
10965: Note:
10966: These are used internally for some implementations of `MatRARt()`
10968: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10969: @*/
PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
{
  PetscFunctionBegin;
  /* dispatch directly through B's function table; assumes B's type provides this kernel
     — NOTE(review): a NULL op here would crash rather than raise PETSC_ERR_SUP */
  PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
  PetscFunctionReturn(PETSC_SUCCESS);
}
10981: /*@
10982: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
10983: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
10984:   in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recover sparse matrix
10985: $C_{sp}$ from $C_{den}$.
10987: Collective
10989: Input Parameters:
10990: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10991: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
10993: Output Parameter:
10994: . Csp - sparse matrix
10996: Level: developer
10998: Note:
10999: These are used internally for some implementations of `MatRARt()`
11001: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
11002: @*/
PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
{
  PetscFunctionBegin;
  /* scatter the dense product values back into the sparse pattern of Csp ... */
  PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
  /* ... then finish assembling Csp so it is usable by the caller */
  PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
11016: /*@
11017: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
11019: Collective
11021: Input Parameters:
11022: + mat - the matrix product C
11023: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
11025: Output Parameter:
11026: . color - the new coloring context
11028: Level: intermediate
11030: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
11031: `MatTransColoringApplyDenToSp()`
11032: @*/
PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
{
  MatTransposeColoring c;
  MPI_Comm             comm;

  PetscFunctionBegin;
  PetscAssertPointer(color, 3);

  PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
  /* inherit the coloring type from the supplied ISColoring */
  c->ctype = iscoloring->ctype;
  /* the matrix type populates the coloring data structures; errors cleanly if unsupported */
  PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
  *color = c;
  PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
11051: /*@
11052: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
11053: matrix has had new nonzero locations added to (or removed from) the matrix since the previous call, the value will be larger.
11055: Not Collective
11057: Input Parameter:
11058: . mat - the matrix
11060: Output Parameter:
11061: . state - the current state
11063: Level: intermediate
11065: Notes:
11066: You can only compare states from two different calls to the SAME matrix, you cannot compare calls between
11067: different matrices
11069: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
11071: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
11073: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
11074: @*/
PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
{
  PetscFunctionBegin;
  /* report the cached counter; per the docs above it grows whenever the nonzero structure changes */
  *state = mat->nonzerostate;
  PetscFunctionReturn(PETSC_SUCCESS);
}
11083: /*@
11084: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
11085: matrices from each processor
11087: Collective
11089: Input Parameters:
11090: + comm - the communicators the parallel matrix will live on
11091: . seqmat - the input sequential matrices
11092: . n - number of local columns (or `PETSC_DECIDE`)
11093: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11095: Output Parameter:
11096: . mpimat - the parallel matrix generated
11098: Level: developer
11100: Note:
11101: The number of columns of the matrix in EACH processor MUST be the same.
11103: .seealso: [](ch_matrices), `Mat`
11104: @*/
11105: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11106: {
11107: PetscMPIInt size;
11109: PetscFunctionBegin;
11110: PetscCallMPI(MPI_Comm_size(comm, &size));
11111: if (size == 1) {
11112: if (reuse == MAT_INITIAL_MATRIX) {
11113: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11114: } else {
11115: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11116: }
11117: PetscFunctionReturn(PETSC_SUCCESS);
11118: }
11120: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
11122: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11123: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11124: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11125: PetscFunctionReturn(PETSC_SUCCESS);
11126: }
11128: /*@
11129: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
11131: Collective
11133: Input Parameters:
11134: + A - the matrix to create subdomains from
11135: - N - requested number of subdomains
11137: Output Parameters:
11138: + n - number of subdomains resulting on this MPI process
11139: - iss - `IS` list with indices of subdomains on this MPI process
11141: Level: advanced
11143: Note:
11144: The number of subdomains must be smaller than the communicator size
11146: .seealso: [](ch_matrices), `Mat`, `IS`
11147: @*/
11148: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11149: {
11150: MPI_Comm comm, subcomm;
11151: PetscMPIInt size, rank, color;
11152: PetscInt rstart, rend, k;
11154: PetscFunctionBegin;
11155: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11156: PetscCallMPI(MPI_Comm_size(comm, &size));
11157: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11158: PetscCheck(N >= 1 && N < (PetscInt)size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11159: *n = 1;
11160: k = ((PetscInt)size) / N + ((PetscInt)size % N > 0); /* There are up to k ranks to a color */
11161: color = rank / k;
11162: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11163: PetscCall(PetscMalloc1(1, iss));
11164: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11165: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11166: PetscCallMPI(MPI_Comm_free(&subcomm));
11167: PetscFunctionReturn(PETSC_SUCCESS);
11168: }
11170: /*@
11171: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11173: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11174: If they are not the same, uses `MatMatMatMult()`.
11176: Once the coarse grid problem is constructed, correct for interpolation operators
11177: that are not of full rank, which can legitimately happen in the case of non-nested
11178: geometric multigrid.
11180: Input Parameters:
11181: + restrct - restriction operator
11182: . dA - fine grid matrix
11183: . interpolate - interpolation operator
11184: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11185: - fill - expected fill, use `PETSC_DEFAULT` if you do not have a good estimate
11187: Output Parameter:
11188: . A - the Galerkin coarse matrix
11190: Options Database Key:
11191: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
11193: Level: developer
11195: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11196: @*/
11197: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11198: {
11199: IS zerorows;
11200: Vec diag;
11202: PetscFunctionBegin;
11203: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
11204: /* Construct the coarse grid matrix */
11205: if (interpolate == restrct) {
11206: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11207: } else {
11208: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11209: }
11211: /* If the interpolation matrix is not of full rank, A will have zero rows.
11212: This can legitimately happen in the case of non-nested geometric multigrid.
11213: In that event, we set the rows of the matrix to the rows of the identity,
11214: ignoring the equations (as the RHS will also be zero). */
11216: PetscCall(MatFindZeroRows(*A, &zerorows));
11218: if (zerorows != NULL) { /* if there are any zero rows */
11219: PetscCall(MatCreateVecs(*A, &diag, NULL));
11220: PetscCall(MatGetDiagonal(*A, diag));
11221: PetscCall(VecISSet(diag, zerorows, 1.0));
11222: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11223: PetscCall(VecDestroy(&diag));
11224: PetscCall(ISDestroy(&zerorows));
11225: }
11226: PetscFunctionReturn(PETSC_SUCCESS);
11227: }
11229: /*@C
11230: MatSetOperation - Allows user to set a matrix operation for any matrix type
11232: Logically Collective
11234: Input Parameters:
11235: + mat - the matrix
11236: . op - the name of the operation
11237: - f - the function that provides the operation
11239: Level: developer
11241: Example Usage:
11242: .vb
11243: extern PetscErrorCode usermult(Mat, Vec, Vec);
11245: PetscCall(MatCreateXXX(comm, ..., &A));
11246: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFn *)usermult));
11247: .ve
11249: Notes:
11250: See the file `include/petscmat.h` for a complete list of matrix
11251: operations, which all have the form MATOP_<OPERATION>, where
11252: <OPERATION> is the name (in all capital letters) of the
11253: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11255: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11256: sequence as the usual matrix interface routines, since they
11257: are intended to be accessed via the usual matrix interface
11258: routines, e.g.,
11259: .vb
11260: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11261: .ve
11263: In particular each function MUST return `PETSC_SUCCESS` on success and
11264: nonzero on failure.
11266: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11268: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11269: @*/
11270: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, void (*f)(void))
11271: {
11272: PetscFunctionBegin;
11274: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))mat->ops->view) mat->ops->viewnative = mat->ops->view;
11275: (((void (**)(void))mat->ops)[op]) = f;
11276: PetscFunctionReturn(PETSC_SUCCESS);
11277: }
11279: /*@C
11280: MatGetOperation - Gets a matrix operation for any matrix type.
11282: Not Collective
11284: Input Parameters:
11285: + mat - the matrix
11286: - op - the name of the operation
11288: Output Parameter:
11289: . f - the function that provides the operation
11291: Level: developer
11293: Example Usage:
11294: .vb
11295: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11297: MatGetOperation(A, MATOP_MULT, (void (**)(void))&usermult);
11298: .ve
11300: Notes:
11301: See the file include/petscmat.h for a complete list of matrix
11302: operations, which all have the form MATOP_<OPERATION>, where
11303: <OPERATION> is the name (in all capital letters) of the
11304: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11306: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11308: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11309: @*/
11310: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, void (**f)(void))
11311: {
11312: PetscFunctionBegin;
11314: *f = (((void (**)(void))mat->ops)[op]);
11315: PetscFunctionReturn(PETSC_SUCCESS);
11316: }
11318: /*@
11319: MatHasOperation - Determines whether the given matrix supports the particular operation.
11321: Not Collective
11323: Input Parameters:
11324: + mat - the matrix
11325: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11327: Output Parameter:
11328: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11330: Level: advanced
11332: Note:
11333: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
11335: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11336: @*/
11337: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11338: {
11339: PetscFunctionBegin;
11341: PetscAssertPointer(has, 3);
11342: if (mat->ops->hasoperation) {
11343: PetscUseTypeMethod(mat, hasoperation, op, has);
11344: } else {
11345: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11346: else {
11347: *has = PETSC_FALSE;
11348: if (op == MATOP_CREATE_SUBMATRIX) {
11349: PetscMPIInt size;
11351: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11352: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11353: }
11354: }
11355: }
11356: PetscFunctionReturn(PETSC_SUCCESS);
11357: }
11359: /*@
11360: MatHasCongruentLayouts - Determines whether the rows and columns layouts of the matrix are congruent
11362: Collective
11364: Input Parameter:
11365: . mat - the matrix
11367: Output Parameter:
11368: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11370: Level: beginner
11372: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11373: @*/
11374: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11375: {
11376: PetscFunctionBegin;
11379: PetscAssertPointer(cong, 2);
11380: if (!mat->rmap || !mat->cmap) {
11381: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11382: PetscFunctionReturn(PETSC_SUCCESS);
11383: }
11384: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11385: PetscCall(PetscLayoutSetUp(mat->rmap));
11386: PetscCall(PetscLayoutSetUp(mat->cmap));
11387: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11388: if (*cong) mat->congruentlayouts = 1;
11389: else mat->congruentlayouts = 0;
11390: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11391: PetscFunctionReturn(PETSC_SUCCESS);
11392: }
/* MatSetInf - dispatches to the matrix type's `setinf` implementation; errors if the type does not provide one.
   Developer-level routine (no public manual page). */
PetscErrorCode MatSetInf(Mat A)
{
  PetscFunctionBegin;
  PetscUseTypeMethod(A, setinf);
  PetscFunctionReturn(PETSC_SUCCESS);
}
11401: /*@
11402: MatCreateGraph - create a scalar matrix (that is a matrix with one vertex for each block vertex in the original matrix), for use in graph algorithms
11403: and possibly removes small values from the graph structure.
11405: Collective
11407: Input Parameters:
11408: + A - the matrix
11409: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11410: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11411: . filter - filter value - < 0: does nothing; == 0: removes only 0.0 entries; otherwise: removes entries with abs(entries) <= value
11412: . num_idx - size of 'index' array
11413: - index - array of block indices to use for graph strength of connection weight
11415: Output Parameter:
11416: . graph - the resulting graph
11418: Level: advanced
11420: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11421: @*/
PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
{
  PetscFunctionBegin;
  PetscAssertPointer(graph, 7); /* graph is argument 7 */
  /* Pure dispatch: the type-specific creategraph routine does all the work; errors if the type does not implement it */
  PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
  PetscFunctionReturn(PETSC_SUCCESS);
}
11433: /*@
11434: MatEliminateZeros - eliminate the nondiagonal zero entries from the nonzero structure of a sparse `Mat` in place,
11435: meaning the same memory is used for the matrix, and no new memory is allocated.
11437: Collective
11439: Input Parameters:
11440: + A - the matrix
11441: - keep - if for a given row of `A`, the diagonal coefficient is zero, indicates whether it should be left in the structure or eliminated as well
11443: Level: intermediate
11445: Developer Note:
11446: The entries in the sparse matrix data structure are shifted to fill in the unneeded locations in the data. Thus the end
11447: of the arrays in the data structure are unneeded.
11449: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11450: @*/
PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
{
  PetscFunctionBegin;
  /* Pure dispatch: the type-specific eliminatezeros routine compacts the nonzero structure in place;
     errors if the matrix type does not implement it */
  PetscUseTypeMethod(A, eliminatezeros, keep);
  PetscFunctionReturn(PETSC_SUCCESS);
}