Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_CreateGraph;
40: PetscLogEvent MAT_SetValuesBatch;
41: PetscLogEvent MAT_ViennaCLCopyToGPU;
42: PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
43: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
44: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
45: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
46: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
47: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
49: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
51: /*@
52: MatSetRandom - Sets all components of a matrix to random numbers.
54: Logically Collective
56: Input Parameters:
57: + x - the matrix
58: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL` and
59: it will create one internally.
61: Example:
62: .vb
63: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
64: MatSetRandom(x,rctx);
65: PetscRandomDestroy(&rctx);
66: .ve
68: Level: intermediate
70: Notes:
71: For sparse matrices that have been preallocated but not been assembled, it randomly selects appropriate locations,
73: for sparse matrices that already have nonzero locations, it fills the locations with random numbers.
75: It generates an error if used on unassembled sparse matrices that have not been preallocated.
77: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
78: @*/
79: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
80: {
81: PetscRandom randObj = NULL;
83: PetscFunctionBegin;
87: MatCheckPreallocated(x, 1);
89: if (!rctx) {
90: MPI_Comm comm;
91: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
92: PetscCall(PetscRandomCreate(comm, &randObj));
93: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
94: PetscCall(PetscRandomSetFromOptions(randObj));
95: rctx = randObj;
96: }
97: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
98: PetscUseTypeMethod(x, setrandom, rctx);
99: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
101: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
103: PetscCall(PetscRandomDestroy(&randObj));
104: PetscFunctionReturn(PETSC_SUCCESS);
105: }
107: /*@
108: MatCopyHashToXAIJ - copy hash table entries into an XAIJ matrix type
110: Logically Collective
112: Input Parameter:
113: . A - A matrix in unassembled, hash table form
115: Output Parameter:
116: . B - The XAIJ matrix. This can either be `A` or some matrix of equivalent size, e.g. obtained from `A` via `MatDuplicate()`
118: Example:
119: .vb
120: PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
121: PetscCall(MatCopyHashToXAIJ(A, B));
122: .ve
124: Level: advanced
126: Notes:
127: If `B` is `A`, then the hash table data structure will be destroyed. `B` is assembled
129: .seealso: [](ch_matrices), `Mat`, `MAT_USE_HASH_TABLE`
130: @*/
131: PetscErrorCode MatCopyHashToXAIJ(Mat A, Mat B)
132: {
133: PetscFunctionBegin;
135: PetscUseTypeMethod(A, copyhashtoxaij, B);
136: PetscFunctionReturn(PETSC_SUCCESS);
137: }
139: /*@
140: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
142: Logically Collective
144: Input Parameter:
145: . mat - the factored matrix
147: Output Parameters:
148: + pivot - the pivot value computed
149: - row - the row in which the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
150: share the matrix
152: Level: advanced
154: Notes:
155: This routine does not work for factorizations done with external packages.
157: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
159: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
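  A sketch of the intended call sequence (illustrative only; `F` is assumed to be a matrix that has just been numerically factored):
.vb
  MatFactorError err;
  PetscReal      pivot;
  PetscInt       row;

  PetscCall(MatFactorGetError(F, &err));
  if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) PetscCall(MatFactorGetErrorZeroPivot(F, &pivot, &row));
.ve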
161: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
162: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
163: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
164: @*/
165: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
166: {
167: PetscFunctionBegin;
169: PetscAssertPointer(pivot, 2);
170: PetscAssertPointer(row, 3);
171: *pivot = mat->factorerror_zeropivot_value;
172: *row = mat->factorerror_zeropivot_row;
173: PetscFunctionReturn(PETSC_SUCCESS);
174: }
176: /*@
177: MatFactorGetError - gets the error code from a factorization
179: Logically Collective
181: Input Parameter:
182: . mat - the factored matrix
184: Output Parameter:
185: . err - the error code
187: Level: advanced
189: Note:
190: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
192: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
193: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
194: @*/
195: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
196: {
197: PetscFunctionBegin;
199: PetscAssertPointer(err, 2);
200: *err = mat->factorerrortype;
201: PetscFunctionReturn(PETSC_SUCCESS);
202: }
204: /*@
205: MatFactorClearError - clears the error code in a factorization
207: Logically Collective
209: Input Parameter:
210: . mat - the factored matrix
212: Level: developer
214: Note:
215: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
217: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
218: `MatGetErrorCode()`, `MatFactorError`
219: @*/
220: PetscErrorCode MatFactorClearError(Mat mat)
221: {
222: PetscFunctionBegin;
224: mat->factorerrortype = MAT_FACTOR_NOERROR;
225: mat->factorerror_zeropivot_value = 0.0;
226: mat->factorerror_zeropivot_row = 0;
227: PetscFunctionReturn(PETSC_SUCCESS);
228: }
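/* Default implementation used when a matrix type does not provide findnonzerorows/findnonzerocols:
   apply the matrix (or its transpose) to a random vector; with probability one an entry of the result
   is zero (or below tol, when tol > 0) only if the corresponding row (or column) is entirely zero. */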
230: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
231: {
232: Vec r, l;
233: const PetscScalar *al;
234: PetscInt i, nz, gnz, N, n, st;
236: PetscFunctionBegin;
237: PetscCall(MatCreateVecs(mat, &r, &l));
238: if (!cols) { /* nonzero rows */
239: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
240: PetscCall(MatGetSize(mat, &N, NULL));
241: PetscCall(MatGetLocalSize(mat, &n, NULL));
242: PetscCall(VecSet(l, 0.0));
243: PetscCall(VecSetRandom(r, NULL));
244: PetscCall(MatMult(mat, r, l));
245: PetscCall(VecGetArrayRead(l, &al));
246: } else { /* nonzero columns */
247: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
248: PetscCall(MatGetSize(mat, NULL, &N));
249: PetscCall(MatGetLocalSize(mat, NULL, &n));
250: PetscCall(VecSet(r, 0.0));
251: PetscCall(VecSetRandom(l, NULL));
252: PetscCall(MatMultTranspose(mat, l, r));
253: PetscCall(VecGetArrayRead(r, &al));
254: }
255: if (tol <= 0.0) {
256: for (i = 0, nz = 0; i < n; i++)
257: if (al[i] != 0.0) nz++;
258: } else {
259: for (i = 0, nz = 0; i < n; i++)
260: if (PetscAbsScalar(al[i]) > tol) nz++;
261: }
262: PetscCallMPI(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
263: if (gnz != N) {
264: PetscInt *nzr;
265: PetscCall(PetscMalloc1(nz, &nzr));
266: if (nz) {
267: if (tol < 0) {
268: for (i = 0, nz = 0; i < n; i++)
269: if (al[i] != 0.0) nzr[nz++] = i + st;
270: } else {
271: for (i = 0, nz = 0; i < n; i++)
272: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
273: }
274: }
275: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
276: } else *nonzero = NULL;
277: if (!cols) { /* nonzero rows */
278: PetscCall(VecRestoreArrayRead(l, &al));
279: } else {
280: PetscCall(VecRestoreArrayRead(r, &al));
281: }
282: PetscCall(VecDestroy(&l));
283: PetscCall(VecDestroy(&r));
284: PetscFunctionReturn(PETSC_SUCCESS);
285: }
287: /*@
288: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
290: Input Parameter:
291: . mat - the matrix
293: Output Parameter:
294: . keptrows - the rows that are not completely zero
296: Level: intermediate
298: Note:
299: `keptrows` is set to `NULL` if all rows are nonzero.
301: Developer Note:
302: If `keptrows` is not `NULL`, it must be sorted.
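  An illustrative sketch (assuming `A` is an assembled matrix):
.vb
  IS keptrows;

  PetscCall(MatFindNonzeroRows(A, &keptrows));
  if (keptrows) { // NULL means every row has a nonzero
    PetscCall(ISView(keptrows, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(ISDestroy(&keptrows));
  }
.ve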
304: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
305: @*/
306: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
307: {
308: PetscFunctionBegin;
311: PetscAssertPointer(keptrows, 2);
312: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
313: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
314: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
315: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
316: if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
317: PetscFunctionReturn(PETSC_SUCCESS);
318: }
320: /*@
321: MatFindZeroRows - Locate all rows that are completely zero in the matrix
323: Input Parameter:
324: . mat - the matrix
326: Output Parameter:
327: . zerorows - the rows that are completely zero
329: Level: intermediate
331: Note:
332: `zerorows` is set to `NULL` if no rows are zero.
334: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
335: @*/
336: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
337: {
338: IS keptrows;
339: PetscInt m, n;
341: PetscFunctionBegin;
344: PetscAssertPointer(zerorows, 2);
345: PetscCall(MatFindNonzeroRows(mat, &keptrows));
346: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
347: In keeping with this convention, we set zerorows to NULL if there are no zero
348: rows. */
349: if (keptrows == NULL) {
350: *zerorows = NULL;
351: } else {
352: PetscCall(MatGetOwnershipRange(mat, &m, &n));
353: PetscCall(ISComplement(keptrows, m, n, zerorows));
354: PetscCall(ISDestroy(&keptrows));
355: }
356: PetscFunctionReturn(PETSC_SUCCESS);
357: }
359: /*@
360: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
362: Not Collective
364: Input Parameter:
365: . A - the matrix
367: Output Parameter:
368: . a - the diagonal part (which is a SEQUENTIAL matrix)
370: Level: advanced
372: Notes:
373: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
375: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
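  An illustrative sketch (assuming `A` is an assembled parallel matrix):
.vb
  Mat Ad;

  PetscCall(MatGetDiagonalBlock(A, &Ad));
  // Ad is a sequential matrix borrowed from A; do not destroy it
.ve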
377: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
378: @*/
379: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
380: {
381: PetscFunctionBegin;
384: PetscAssertPointer(a, 2);
385: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
386: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
387: else {
388: PetscMPIInt size;
390: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
391: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
392: *a = A;
393: }
394: PetscFunctionReturn(PETSC_SUCCESS);
395: }
397: /*@
398: MatGetTrace - Gets the trace of a matrix. The sum of the diagonal entries.
400: Collective
402: Input Parameter:
403: . mat - the matrix
405: Output Parameter:
406: . trace - the sum of the diagonal entries
408: Level: advanced
410: .seealso: [](ch_matrices), `Mat`
411: @*/
412: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
413: {
414: Vec diag;
416: PetscFunctionBegin;
418: PetscAssertPointer(trace, 2);
419: PetscCall(MatCreateVecs(mat, &diag, NULL));
420: PetscCall(MatGetDiagonal(mat, diag));
421: PetscCall(VecSum(diag, trace));
422: PetscCall(VecDestroy(&diag));
423: PetscFunctionReturn(PETSC_SUCCESS);
424: }
426: /*@
427: MatRealPart - Zeros out the imaginary part of the matrix
429: Logically Collective
431: Input Parameter:
432: . mat - the matrix
434: Level: advanced
436: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
437: @*/
438: PetscErrorCode MatRealPart(Mat mat)
439: {
440: PetscFunctionBegin;
443: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
444: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
445: MatCheckPreallocated(mat, 1);
446: PetscUseTypeMethod(mat, realpart);
447: PetscFunctionReturn(PETSC_SUCCESS);
448: }
450: /*@C
451: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
453: Collective
455: Input Parameter:
456: . mat - the matrix
458: Output Parameters:
459: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
460: - ghosts - the global indices of the ghost points
462: Level: advanced
464: Note:
465: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
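  An illustrative sketch (assuming `mat` is an assembled `MATMPIAIJ` matrix, so block size 1 and one ghost per off-process column):
.vb
  PetscInt        nghosts, n;
  const PetscInt *ghosts;
  Vec             v;

  PetscCall(MatGetGhosts(mat, &nghosts, &ghosts));
  PetscCall(MatGetLocalSize(mat, NULL, &n));
  PetscCall(VecCreateGhost(PetscObjectComm((PetscObject)mat), n, PETSC_DECIDE, nghosts, ghosts, &v));
.ve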
467: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
468: @*/
469: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
470: {
471: PetscFunctionBegin;
474: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
475: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
476: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
477: else {
478: if (nghosts) *nghosts = 0;
479: if (ghosts) *ghosts = NULL;
480: }
481: PetscFunctionReturn(PETSC_SUCCESS);
482: }
484: /*@
485: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
487: Logically Collective
489: Input Parameter:
490: . mat - the matrix
492: Level: advanced
494: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
495: @*/
496: PetscErrorCode MatImaginaryPart(Mat mat)
497: {
498: PetscFunctionBegin;
501: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
502: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
503: MatCheckPreallocated(mat, 1);
504: PetscUseTypeMethod(mat, imaginarypart);
505: PetscFunctionReturn(PETSC_SUCCESS);
506: }
508: /*@
509: MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure
511: Not Collective
513: Input Parameter:
514: . mat - the matrix
516: Output Parameters:
517: + missing - is any diagonal entry missing
518: - dd - first diagonal entry that is missing (optional) on this process
520: Level: advanced
522: Note:
523: This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value
525: .seealso: [](ch_matrices), `Mat`
526: @*/
527: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
528: {
529: PetscFunctionBegin;
532: PetscAssertPointer(missing, 2);
533: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
534: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
535: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
536: PetscFunctionReturn(PETSC_SUCCESS);
537: }
539: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
540: /*@C
541: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
542: for each row that you get to ensure that your application does
543: not bleed memory.
545: Not Collective
547: Input Parameters:
548: + mat - the matrix
549: - row - the row to get
551: Output Parameters:
552: + ncols - if not `NULL`, the number of nonzeros in `row`
553: . cols - if not `NULL`, the column numbers
554: - vals - if not `NULL`, the numerical values
556: Level: advanced
558: Notes:
559: This routine is provided for people who need to have direct access
560: to the structure of a matrix. We hope that we provide enough
561: high-level matrix routines that few users will need it.
563: `MatGetRow()` always returns 0-based column indices, regardless of
564: whether the internal representation is 0-based (default) or 1-based.
566: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
567: not wish to extract these quantities.
569: The user can only examine the values extracted with `MatGetRow()`;
570: the values CANNOT be altered. To change the matrix entries, one
571: must use `MatSetValues()`.
573: You can only have one call to `MatGetRow()` outstanding for a particular
574: matrix at a time, per processor. `MatGetRow()` can only obtain rows
575: associated with the given processor, it cannot get rows from the
576: other processors; for that we suggest using `MatCreateSubMatrices()`, then
577: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
578: is in the global number of rows.
580: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
582: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
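  A common access pattern (illustrative sketch; `A` is assumed to be an assembled matrix):
.vb
  PetscInt           rstart, rend, ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;

  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  for (PetscInt row = rstart; row < rend; row++) {
    PetscCall(MatGetRow(A, row, &ncols, &cols, &vals));
    // examine cols[] and vals[] here; they are read-only
    PetscCall(MatRestoreRow(A, row, &ncols, &cols, &vals));
  }
.ve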
584: Fortran Note:
585: .vb
586: PetscInt, pointer :: cols(:)
587: PetscScalar, pointer :: vals(:)
588: .ve
590: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
591: @*/
592: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
593: {
594: PetscInt incols;
596: PetscFunctionBegin;
599: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
600: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
601: MatCheckPreallocated(mat, 1);
602: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
603: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
604: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
605: if (ncols) *ncols = incols;
606: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
607: PetscFunctionReturn(PETSC_SUCCESS);
608: }
610: /*@
611: MatConjugate - replaces the matrix values with their complex conjugates
613: Logically Collective
615: Input Parameter:
616: . mat - the matrix
618: Level: advanced
620: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
621: @*/
622: PetscErrorCode MatConjugate(Mat mat)
623: {
624: PetscFunctionBegin;
626: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
627: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
628: PetscUseTypeMethod(mat, conjugate);
629: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
630: }
631: PetscFunctionReturn(PETSC_SUCCESS);
632: }
634: /*@C
635: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
637: Not Collective
639: Input Parameters:
640: + mat - the matrix
641: . row - the row to get
642: . ncols - the number of nonzeros
643: . cols - the columns of the nonzeros
644: - vals - if not `NULL`, the values of the nonzeros
646: Level: advanced
648: Notes:
649: This routine should be called after you have finished examining the entries.
651: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
652: use of the array after it has been restored. If you pass `NULL`, it will
653: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
655: Fortran Note:
656: .vb
657: PetscInt, pointer :: cols(:)
658: PetscScalar, pointer :: vals(:)
659: .ve
661: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
662: @*/
663: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
664: {
665: PetscFunctionBegin;
667: if (ncols) PetscAssertPointer(ncols, 3);
668: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
669: PetscTryTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
670: if (ncols) *ncols = 0;
671: if (cols) *cols = NULL;
672: if (vals) *vals = NULL;
673: PetscFunctionReturn(PETSC_SUCCESS);
674: }
676: /*@
677: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for a matrix in `MATSBAIJ` format.
678: You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.
680: Not Collective
682: Input Parameter:
683: . mat - the matrix
685: Level: advanced
687: Note:
688: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.
690: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
691: @*/
692: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
693: {
694: PetscFunctionBegin;
697: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
698: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
699: MatCheckPreallocated(mat, 1);
700: PetscTryTypeMethod(mat, getrowuppertriangular);
701: PetscFunctionReturn(PETSC_SUCCESS);
702: }
704: /*@
705: MatRestoreRowUpperTriangular - Disables calls to `MatGetRow()` for a matrix in `MATSBAIJ` format.
707: Not Collective
709: Input Parameter:
710: . mat - the matrix
712: Level: advanced
714: Note:
715: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
717: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
718: @*/
719: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
720: {
721: PetscFunctionBegin;
724: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
725: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
726: MatCheckPreallocated(mat, 1);
727: PetscTryTypeMethod(mat, restorerowuppertriangular);
728: PetscFunctionReturn(PETSC_SUCCESS);
729: }
731: /*@
732: MatSetOptionsPrefix - Sets the prefix used for searching for all
733: `Mat` options in the database.
735: Logically Collective
737: Input Parameters:
738: + A - the matrix
739: - prefix - the prefix to prepend to all option names
741: Level: advanced
743: Notes:
744: A hyphen (-) must NOT be given at the beginning of the prefix name.
745: The first character of all runtime options is AUTOMATICALLY the hyphen.
747: This is NOT used for options for the factorization of the matrix. Normally the
748: prefix is automatically passed in from the PC calling the factorization. To set
749: it directly use `MatSetOptionsPrefixFactor()`
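  An illustrative sketch (the prefix string is arbitrary):
.vb
  PetscCall(MatSetOptionsPrefix(A, "sys1_"));
  PetscCall(MatSetFromOptions(A));
  // options for this matrix are now selected with, e.g., -sys1_mat_type aij
.ve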
751: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
752: @*/
753: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
754: {
755: PetscFunctionBegin;
757: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
758: PetscTryMethod(A, "MatSetOptionsPrefix_C", (Mat, const char[]), (A, prefix));
759: PetscFunctionReturn(PETSC_SUCCESS);
760: }
762: /*@
763: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database
764: for matrices created with `MatGetFactor()`
766: Logically Collective
768: Input Parameters:
769: + A - the matrix
770: - prefix - the prefix to prepend to all option names for the factored matrix
772: Level: developer
774: Notes:
775: A hyphen (-) must NOT be given at the beginning of the prefix name.
776: The first character of all runtime options is AUTOMATICALLY the hyphen.
778: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
779: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
781: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
782: @*/
783: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
784: {
785: PetscFunctionBegin;
787: if (prefix) {
788: PetscAssertPointer(prefix, 2);
789: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
790: if (prefix != A->factorprefix) {
791: PetscCall(PetscFree(A->factorprefix));
792: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
793: }
794: } else PetscCall(PetscFree(A->factorprefix));
795: PetscFunctionReturn(PETSC_SUCCESS);
796: }
798: /*@
799: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database
800: for matrices created with `MatGetFactor()`
802: Logically Collective
804: Input Parameters:
805: + A - the matrix
806: - prefix - the prefix to prepend to all option names for the factored matrix
808: Level: developer
810: Notes:
811: A hyphen (-) must NOT be given at the beginning of the prefix name.
812: The first character of all runtime options is AUTOMATICALLY the hyphen.
814: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
815: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
817: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
818: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
819: `MatSetOptionsPrefix()`
820: @*/
821: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
822: {
823: size_t len1, len2, new_len;
825: PetscFunctionBegin;
827: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
828: if (!A->factorprefix) {
829: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
830: PetscFunctionReturn(PETSC_SUCCESS);
831: }
832: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
834: PetscCall(PetscStrlen(A->factorprefix, &len1));
835: PetscCall(PetscStrlen(prefix, &len2));
836: new_len = len1 + len2 + 1;
837: PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
838: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
839: PetscFunctionReturn(PETSC_SUCCESS);
840: }
842: /*@
843: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
844: matrix options in the database.
846: Logically Collective
848: Input Parameters:
849: + A - the matrix
850: - prefix - the prefix to prepend to all option names
852: Level: advanced
854: Note:
855: A hyphen (-) must NOT be given at the beginning of the prefix name.
856: The first character of all runtime options is AUTOMATICALLY the hyphen.
858: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
859: @*/
860: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
861: {
862: PetscFunctionBegin;
864: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
865: PetscTryMethod(A, "MatAppendOptionsPrefix_C", (Mat, const char[]), (A, prefix));
866: PetscFunctionReturn(PETSC_SUCCESS);
867: }
869: /*@
870: MatGetOptionsPrefix - Gets the prefix used for searching for all
871: matrix options in the database.
873: Not Collective
875: Input Parameter:
876: . A - the matrix
878: Output Parameter:
879: . prefix - pointer to the prefix string used
881: Level: advanced
883: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
884: @*/
885: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
886: {
887: PetscFunctionBegin;
889: PetscAssertPointer(prefix, 2);
890: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
891: PetscFunctionReturn(PETSC_SUCCESS);
892: }
894: /*@
895: MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`
897: Not Collective
899: Input Parameter:
900: . A - the matrix
902: Output Parameter:
903: . state - the object state
905: Level: advanced
907: Note:
908: Object state is an integer which gets increased every time
909: the object is changed. By saving and later querying the object state
910: one can determine whether information about the object is still current.
912: See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.
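  An illustrative sketch for caching information derived from `A`:
.vb
  PetscObjectState state, newstate;

  PetscCall(MatGetState(A, &state));
  // ... code that may modify A ...
  PetscCall(MatGetState(A, &newstate));
  if (newstate != state) {
    // A changed; recompute anything derived from it
  }
.ve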
914: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
915: @*/
916: PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
917: {
918: PetscFunctionBegin;
920: PetscAssertPointer(state, 2);
921: PetscCall(PetscObjectStateGet((PetscObject)A, state));
922: PetscFunctionReturn(PETSC_SUCCESS);
923: }
925: /*@
926: MatResetPreallocation - Reset matrix to use the original preallocation values provided by the user, for example with `MatXAIJSetPreallocation()`
928: Collective
930: Input Parameter:
931: . A - the matrix
933: Level: beginner
935: Notes:
936: After calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`, the matrix data structures represent only the nonzeros assigned to the
937: matrix. If that space is less than the preallocated space, the extra preallocated space is no longer available to take on new values. `MatResetPreallocation()`
938: makes all of the preallocated space available again
940: Current values in the matrix are lost in this call
942: Currently only supported for `MATAIJ` matrices.
944: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
945: @*/
946: PetscErrorCode MatResetPreallocation(Mat A)
947: {
948: PetscFunctionBegin;
951: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
952: PetscFunctionReturn(PETSC_SUCCESS);
953: }
955: /*@
956: MatResetHash - Reset the matrix so that it will use a hash table for the next round of `MatSetValues()` and `MatAssemblyBegin()`/`MatAssemblyEnd()`.
958: Collective
960: Input Parameter:
961: . A - the matrix
963: Level: intermediate
965: Notes:
966: The matrix will again delete the hash table data structures after following calls to `MatAssemblyBegin()`/`MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
968: Currently only supported for `MATAIJ` matrices.
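  An illustrative sketch (assuming `A` is an assembled `MATAIJ` matrix and this process owns row 0):
.vb
  PetscInt    i = 0, j = 0;
  PetscScalar v = 1.0;

  PetscCall(MatResetHash(A));
  PetscCall(MatSetValues(A, 1, &i, 1, &j, &v, INSERT_VALUES)); // new nonzero locations are allowed
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve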
970: .seealso: [](ch_matrices), `Mat`, `MatResetPreallocation()`
971: @*/
972: PetscErrorCode MatResetHash(Mat A)
973: {
974: PetscFunctionBegin;
977: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset to hash state after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
978: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
979: PetscUseMethod(A, "MatResetHash_C", (Mat), (A));
980: /* These flags are used to determine whether certain setups occur */
981: A->was_assembled = PETSC_FALSE;
982: A->assembled = PETSC_FALSE;
983: /* Log that the state of this object has changed; this will help guarantee that preconditioners get re-setup */
984: PetscCall(PetscObjectStateIncrease((PetscObject)A));
985: PetscFunctionReturn(PETSC_SUCCESS);
986: }
988: /*@
989: MatSetUp - Sets up the internal matrix data structures for later use by the matrix
991: Collective
993: Input Parameter:
994: . A - the matrix
996: Level: advanced
998: Notes:
999: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
1000: setting values in the matrix.
1002: This routine is called internally by other `Mat` functions when needed so rarely needs to be called by users
1004: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
1005: @*/
1006: PetscErrorCode MatSetUp(Mat A)
1007: {
1008: PetscFunctionBegin;
1010: if (!((PetscObject)A)->type_name) {
1011: PetscMPIInt size;
1013: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1014: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
1015: }
1016: if (!A->preallocated) PetscTryTypeMethod(A, setup);
1017: PetscCall(PetscLayoutSetUp(A->rmap));
1018: PetscCall(PetscLayoutSetUp(A->cmap));
1019: A->preallocated = PETSC_TRUE;
1020: PetscFunctionReturn(PETSC_SUCCESS);
1021: }
1023: #if defined(PETSC_HAVE_SAWS)
1024: #include <petscviewersaws.h>
1025: #endif
1027: /*
1028: If threadsafety is on extraneous matrices may be printed
1030: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
1031: */
1032: #if !defined(PETSC_HAVE_THREADSAFETY)
1033: static PetscInt insidematview = 0;
1034: #endif
1036: /*@
1037: MatViewFromOptions - View properties of the matrix based on options set in the options database
1039: Collective
1041: Input Parameters:
1042: + A - the matrix
1043: . obj - optional additional object that provides the options prefix to use
1044: - name - command line option
1046: Options Database Key:
1047: . -mat_view [viewertype]:... - the viewer and its options
1049: Level: intermediate
1051: Note:
1052: .vb
1053: If no value is provided ascii:stdout is used
1054: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
1055: for example ascii::ascii_info prints just the information about the object not all details
1056: unless :append is given filename opens in write mode, overwriting what was already there
1057: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
1058: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
1059: socket[:port] defaults to the standard output port
1060: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
1061: .ve
1063: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1064: @*/
1065: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
1066: {
1067: PetscFunctionBegin;
1069: #if !defined(PETSC_HAVE_THREADSAFETY)
1070: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
1071: #endif
1072: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
1073: PetscFunctionReturn(PETSC_SUCCESS);
1074: }
1076: /*@
1077: MatView - display information about a matrix in a variety of ways
1079: Collective on viewer
1081: Input Parameters:
1082: + mat - the matrix
1083: - viewer - visualization context
1085: Options Database Keys:
1086: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1087: . -mat_view ::ascii_info_detail - Prints more detailed info
1088: . -mat_view - Prints matrix in ASCII format
1089: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1090: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1091: . -display <name> - Sets display name (default is host)
1092: . -draw_pause <sec> - Sets number of seconds to pause after display
1093: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1094: . -viewer_socket_machine <machine> - -
1095: . -viewer_socket_port <port> - -
1096: . -mat_view binary - save matrix to file in binary format
1097: - -viewer_binary_filename <name> - -
1099: Level: beginner
1101: Notes:
1102: The available visualization contexts include
1103: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1104: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1105: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1106: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1108: The user can open alternative visualization contexts with
1109: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1110: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a specified file; corresponding input uses `MatLoad()`
1111: . `PetscViewerDrawOpen()` - Outputs nonzero matrix nonzero structure to an X window display
1112: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer, `PETSCVIEWERSOCKET`. Only the `MATSEQDENSE` and `MATAIJ` types support this viewer.
1114: The user can call `PetscViewerPushFormat()` to specify the output
1115: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1116: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1117: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1118: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1119: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1120: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse format common among all matrix types
1121: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific format (which is in many cases the same as the default)
1122: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix size and structure (not the matrix entries)
1123: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about the matrix nonzero structure (still not vector or matrix entries)
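  For example, to print only basic information about a matrix (an illustrative sketch; `A` is assumed to be an assembled matrix):
.vb
  PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO));
  PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
.ve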
1125: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1126: for larger matrices the program will seemingly hang and take hours, so one should use the binary format instead.
1128: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1130: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1131: viewer is used.
1133: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1134: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1136: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1137: and then use the following mouse functions.
1138: .vb
1139: left mouse: zoom in
1140: middle mouse: zoom out
1141: right mouse: continue with the simulation
1142: .ve
1144: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1145: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1146: @*/
1147: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1148: {
1149: PetscInt rows, cols, rbs, cbs;
1150: PetscBool isascii, isstring, issaws;
1151: PetscViewerFormat format;
1152: PetscMPIInt size;
1154: PetscFunctionBegin;
1157: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1160: PetscCall(PetscViewerGetFormat(viewer, &format));
1161: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1162: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1164: #if !defined(PETSC_HAVE_THREADSAFETY)
1165: insidematview++;
1166: #endif
1167: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1168: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1169: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1170: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1172: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1173: if (isascii) {
1174: if (!mat->preallocated) {
1175: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1176: #if !defined(PETSC_HAVE_THREADSAFETY)
1177: insidematview--;
1178: #endif
1179: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1180: PetscFunctionReturn(PETSC_SUCCESS);
1181: }
1182: if (!mat->assembled) {
1183: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1184: #if !defined(PETSC_HAVE_THREADSAFETY)
1185: insidematview--;
1186: #endif
1187: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1188: PetscFunctionReturn(PETSC_SUCCESS);
1189: }
1190: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1191: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1192: MatNullSpace nullsp, transnullsp;
1194: PetscCall(PetscViewerASCIIPushTab(viewer));
1195: PetscCall(MatGetSize(mat, &rows, &cols));
1196: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1197: if (rbs != 1 || cbs != 1) {
1198: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1199: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1200: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1201: if (mat->factortype) {
1202: MatSolverType solver;
1203: PetscCall(MatFactorGetSolverType(mat, &solver));
1204: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1205: }
1206: if (mat->ops->getinfo) {
1207: PetscBool is_constant_or_diagonal;
1209: // Don't print nonzero information for constant or diagonal matrices, it just adds noise to the output
1210: PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &is_constant_or_diagonal, MATCONSTANTDIAGONAL, MATDIAGONAL, ""));
1211: if (!is_constant_or_diagonal) {
1212: MatInfo info;
1214: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1215: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1216: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1217: }
1218: }
1219: PetscCall(MatGetNullSpace(mat, &nullsp));
1220: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1221: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1222: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1223: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1224: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1225: PetscCall(PetscViewerASCIIPushTab(viewer));
1226: PetscCall(MatProductView(mat, viewer));
1227: PetscCall(PetscViewerASCIIPopTab(viewer));
1228: if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1229: IS tmp;
1231: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
1232: PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1233: PetscCall(PetscViewerASCIIPushTab(viewer));
1234: PetscCall(ISView(tmp, viewer));
1235: PetscCall(PetscViewerASCIIPopTab(viewer));
1236: PetscCall(ISDestroy(&tmp));
1237: }
1238: }
1239: } else if (issaws) {
1240: #if defined(PETSC_HAVE_SAWS)
1241: PetscMPIInt rank;
1243: PetscCall(PetscObjectName((PetscObject)mat));
1244: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1245: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1246: #endif
1247: } else if (isstring) {
1248: const char *type;
1249: PetscCall(MatGetType(mat, &type));
1250: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1251: PetscTryTypeMethod(mat, view, viewer);
1252: }
1253: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1254: PetscCall(PetscViewerASCIIPushTab(viewer));
1255: PetscUseTypeMethod(mat, viewnative, viewer);
1256: PetscCall(PetscViewerASCIIPopTab(viewer));
1257: } else if (mat->ops->view) {
1258: PetscCall(PetscViewerASCIIPushTab(viewer));
1259: PetscUseTypeMethod(mat, view, viewer);
1260: PetscCall(PetscViewerASCIIPopTab(viewer));
1261: }
1262: if (isascii) {
1263: PetscCall(PetscViewerGetFormat(viewer, &format));
1264: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1265: }
1266: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1267: #if !defined(PETSC_HAVE_THREADSAFETY)
1268: insidematview--;
1269: #endif
1270: PetscFunctionReturn(PETSC_SUCCESS);
1271: }
1273: #if defined(PETSC_USE_DEBUG)
1274: #include <../src/sys/totalview/tv_data_display.h>
1275: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1276: {
1277: TV_add_row("Local rows", "int", &mat->rmap->n);
1278: TV_add_row("Local columns", "int", &mat->cmap->n);
1279: TV_add_row("Global rows", "int", &mat->rmap->N);
1280: TV_add_row("Global columns", "int", &mat->cmap->N);
1281: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1282: return TV_format_OK;
1283: }
1284: #endif
1286: /*@
1287: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1288: with `MatView()`. The matrix format is determined from the options database.
1289: Generates a parallel MPI matrix if the communicator has more than one
1290: processor. The default matrix type is `MATAIJ`.
1292: Collective
1294: Input Parameters:
1295: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1296: or some related function before a call to `MatLoad()`
1297: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1299: Options Database Key:
1300: . -matload_block_size <bs> - set block size
1302: Level: beginner
1304: Notes:
1305: If the `Mat` type has not yet been given then `MATAIJ` is used; call `MatSetFromOptions()` on the
1306: `Mat` before calling this routine if you wish to set it from the options database.
1308: `MatLoad()` automatically loads into the options database any options
1309: given in the file filename.info where filename is the name of the file
1310: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1311: file will be ignored if you use the -viewer_binary_skip_info option.
1313: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1314: sets the default matrix type AIJ and sets the local and global sizes.
1315: If type and/or size is already set, then the same are used.
1317: In parallel, each processor can load a subset of rows (or the
1318: entire matrix). This routine is especially useful when a large
1319: matrix is stored on disk and only part of it is desired on each
1320: processor. For example, a parallel solver may access only some of
1321: the rows from each processor. The algorithm used here reads
1322: relatively small blocks of data rather than reading the entire
1323: matrix and then subsetting it.
1325: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1326: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1327: or the sequence like
1328: .vb
1329: `PetscViewer` v;
1330: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1331: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1332: `PetscViewerSetFromOptions`(v);
1333: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1334: `PetscViewerFileSetName`(v,"datafile");
1335: .ve
1336: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1337: .vb
1338: -viewer_type {binary, hdf5}
1339: .ve
1341: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1342: and src/mat/tutorials/ex10.c with the second approach.
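  A minimal sketch of the first approach (the file name "matrix.dat" is only illustrative):
.vb
  Mat         A;
  PetscViewer viewer;

  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "matrix.dat", FILE_MODE_READ, &viewer)); // illustrative file name
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatLoad(A, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
.ve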
1344: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1345: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1346: Multiple objects, both matrices and vectors, can be stored within the same file.
1347: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1349: Most users should not need to know the details of the binary storage
1350: format, since `MatLoad()` and `MatView()` completely hide these details.
1351: But for anyone who is interested, the standard binary matrix storage
1352: format is
1354: .vb
1355: PetscInt MAT_FILE_CLASSID
1356: PetscInt number of rows
1357: PetscInt number of columns
1358: PetscInt total number of nonzeros
1359: PetscInt *number nonzeros in each row
1360: PetscInt *column indices of all nonzeros (starting index is zero)
1361: PetscScalar *values of all nonzeros
1362: .ve
1363: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1364: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1365: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1367: PETSc automatically does the byte swapping for
1368: machines that store the bytes reversed. Thus if you write your own binary
1369: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1370: and `PetscBinaryWrite()` to see how this may be done.
1372: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1373: Each processor's chunk is loaded independently by its owning MPI process.
1374: Multiple objects, both matrices and vectors, can be stored within the same file.
1375: They are looked up by their PetscObject name.
1377: As the MATLAB MAT-File Version 7.3 format is also a HDF5 flavor, we decided to use
1378: by default the same structure and naming of the AIJ arrays and column count
1379: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1380: .vb
1381: save example.mat A b -v7.3
1382: .ve
1383: can be directly read by this routine (see Reference 1 for details).
1385: Depending on your MATLAB version, this format might be a default,
1386: otherwise you can set it as default in Preferences.
1388: Unless the -nocompression flag is used to save the file in MATLAB,
1389: PETSc must be configured with the ZLIB package.
1391: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1393: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1395: Corresponding `MatView()` is not yet implemented.
1397: The loaded matrix is actually a transpose of the original one in MATLAB,
1398: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1399: With this format, the matrix is automatically transposed by PETSc,
1400: unless the matrix is marked as SPD or symmetric
1401: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1403: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1405: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1406: @*/
1407: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1408: {
1409: PetscBool flg;
1411: PetscFunctionBegin;
1415: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1417: flg = PETSC_FALSE;
1418: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1419: if (flg) {
1420: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1421: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1422: }
1423: flg = PETSC_FALSE;
1424: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1425: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1427: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1428: PetscUseTypeMethod(mat, load, viewer);
1429: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1430: PetscFunctionReturn(PETSC_SUCCESS);
1431: }
1433: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1434: {
1435: Mat_Redundant *redund = *redundant;
1437: PetscFunctionBegin;
1438: if (redund) {
1439: if (redund->matseq) { /* via MatCreateSubMatrices() */
1440: PetscCall(ISDestroy(&redund->isrow));
1441: PetscCall(ISDestroy(&redund->iscol));
1442: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1443: } else {
1444: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1445: PetscCall(PetscFree(redund->sbuf_j));
1446: PetscCall(PetscFree(redund->sbuf_a));
1447: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1448: PetscCall(PetscFree(redund->rbuf_j[i]));
1449: PetscCall(PetscFree(redund->rbuf_a[i]));
1450: }
1451: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1452: }
1454: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1455: PetscCall(PetscFree(redund));
1456: }
1457: PetscFunctionReturn(PETSC_SUCCESS);
1458: }
1460: /*@
1461: MatDestroy - Frees space taken by a matrix.
1463: Collective
1465: Input Parameter:
1466: . A - the matrix
1468: Level: beginner
1470: Developer Note:
1471: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1472: `MatDestroySubMatrices()`. Thus any change made here must also be made in those routines.
1473: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1474: if changes are needed here.
1476: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1477: @*/
1478: PetscErrorCode MatDestroy(Mat *A)
1479: {
1480: PetscFunctionBegin;
1481: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1483: if (--((PetscObject)*A)->refct > 0) {
1484: *A = NULL;
1485: PetscFunctionReturn(PETSC_SUCCESS);
1486: }
1488: /* if memory was published with SAWs then destroy it */
1489: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1490: PetscTryTypeMethod(*A, destroy);
1492: PetscCall(PetscFree((*A)->factorprefix));
1493: PetscCall(PetscFree((*A)->defaultvectype));
1494: PetscCall(PetscFree((*A)->defaultrandtype));
1495: PetscCall(PetscFree((*A)->bsizes));
1496: PetscCall(PetscFree((*A)->solvertype));
1497: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1498: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1499: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1500: PetscCall(MatProductClear(*A));
1501: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1502: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1503: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1504: PetscCall(MatDestroy(&(*A)->schur));
1505: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1506: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1507: PetscCall(PetscHeaderDestroy(A));
1508: PetscFunctionReturn(PETSC_SUCCESS);
1509: }
1511: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1512: /*@
1513: MatSetValues - Inserts or adds a block of values into a matrix.
1514: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1515: MUST be called after all calls to `MatSetValues()` have been completed.
1517: Not Collective
1519: Input Parameters:
1520: + mat - the matrix
1521: . m - the number of rows
1522: . idxm - the global indices of the rows
1523: . n - the number of columns
1524: . idxn - the global indices of the columns
1525: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1526: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1527: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1529: Level: beginner
1531: Notes:
1532: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1533: options cannot be mixed without intervening calls to the assembly
1534: routines.
1536: `MatSetValues()` uses 0-based row and column numbers in Fortran
1537: as well as in C.
1539: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1540: simply ignored. This allows easily inserting element stiffness matrices
1541: with homogeneous Dirichlet boundary conditions that you don't want represented
1542: in the matrix.
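   As a minimal sketch (assuming `mat` has already been preallocated, for example with `MatSeqAIJSetPreallocation()`),
   the following inserts a 2x2 block of values into rows 0,1 and columns 0,2 and then assembles the matrix:
.vb
   PetscInt    rows[2] = {0, 1}, cols[2] = {0, 2};
   PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0};   /* row-major: (0,0) (0,2) (1,0) (1,2) */

   PetscCall(MatSetValues(mat, 2, rows, 2, cols, vals, INSERT_VALUES));
   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve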
1544: Efficiency Alert:
1545: The routine `MatSetValuesBlocked()` may offer much better efficiency
1546: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1548: Fortran Notes:
1549: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1550: .vb
1551: call MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
1552: .ve
1554: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1556: Developer Note:
1557: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1558: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1560: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1561: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1562: @*/
1563: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1564: {
1565: PetscFunctionBeginHot;
1568: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1569: PetscAssertPointer(idxm, 3);
1570: PetscAssertPointer(idxn, 5);
1571: MatCheckPreallocated(mat, 1);
1573: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1574: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1576: if (PetscDefined(USE_DEBUG)) {
1577: PetscInt i, j;
1579: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1580: if (v) {
1581: for (i = 0; i < m; i++) {
1582: for (j = 0; j < n; j++) {
1583: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1584: #if defined(PETSC_USE_COMPLEX)
1585: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1586: #else
1587: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1588: #endif
1589: }
1590: }
1591: }
1592: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1593: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1594: }
1596: if (mat->assembled) {
1597: mat->was_assembled = PETSC_TRUE;
1598: mat->assembled = PETSC_FALSE;
1599: }
1600: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1601: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1602: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1603: PetscFunctionReturn(PETSC_SUCCESS);
1604: }
1606: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1607: /*@
1608: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns
1609: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1610: MUST be called after all calls to `MatSetValues()` have been completed.
1612: Not Collective
1614: Input Parameters:
1615: + mat - the matrix
1616: . ism - the rows to provide
1617: . isn - the columns to provide
1618: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1619: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1620: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1622: Level: beginner
1624: Notes:
1625: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1627: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1628: options cannot be mixed without intervening calls to the assembly
1629: routines.
1631: `MatSetValues()` uses 0-based row and column numbers in Fortran
1632: as well as in C.
1634: Negative indices may be passed in `ism` and `isn`, these rows and columns are
1635: simply ignored. This allows easily inserting element stiffness matrices
1636: with homogeneous Dirichlet boundary conditions that you don't want represented
1637: in the matrix.
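   A minimal sketch (the index and value choices are illustrative, and `mat` is assumed to be preallocated):
.vb
   IS          ism, isn;
   PetscInt    rows[2] = {0, 1}, cols[2] = {0, 2};
   PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0};

   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, rows, PETSC_COPY_VALUES, &ism));
   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, cols, PETSC_COPY_VALUES, &isn));
   PetscCall(MatSetValuesIS(mat, ism, isn, vals, INSERT_VALUES));
   PetscCall(ISDestroy(&ism));
   PetscCall(ISDestroy(&isn));
.ve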
1639: Fortran Note:
1640: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1642: Efficiency Alert:
1643: The routine `MatSetValuesBlocked()` may offer much better efficiency
1644: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1646: This is currently not optimized for any particular `ISType`
1648: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1649: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1650: @*/
1651: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1652: {
1653: PetscInt m, n;
1654: const PetscInt *rows, *cols;
1656: PetscFunctionBeginHot;
1658: PetscCall(ISGetIndices(ism, &rows));
1659: PetscCall(ISGetIndices(isn, &cols));
1660: PetscCall(ISGetLocalSize(ism, &m));
1661: PetscCall(ISGetLocalSize(isn, &n));
1662: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1663: PetscCall(ISRestoreIndices(ism, &rows));
1664: PetscCall(ISRestoreIndices(isn, &cols));
1665: PetscFunctionReturn(PETSC_SUCCESS);
1666: }
1668: /*@
1669: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1670: values into a matrix
1672: Not Collective
1674: Input Parameters:
1675: + mat - the matrix
1676: . row - the (block) row to set
1677: - v - a one-dimensional array that contains the values. For `MATBAIJ` they are implicitly stored as a two-dimensional array, by default in row-major order.
1678: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1680: Level: intermediate
1682: Notes:
1683: The values, `v`, are column-oriented (for the block version) and sorted
1685: All the nonzero values in `row` must be provided
1687: The matrix must have previously had its column indices set, likely by having been assembled.
1689: `row` must belong to this MPI process
1691: Fortran Note:
1692: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1694: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1695: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1696: @*/
1697: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1698: {
1699: PetscInt globalrow;
1701: PetscFunctionBegin;
1704: PetscAssertPointer(v, 3);
1705: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1706: PetscCall(MatSetValuesRow(mat, globalrow, v));
1707: PetscFunctionReturn(PETSC_SUCCESS);
1708: }
1710: /*@
1711: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1712: values into a matrix
1714: Not Collective
1716: Input Parameters:
1717: + mat - the matrix
1718: . row - the (block) row to set
1719: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1721: Level: advanced
1723: Notes:
1724: The values, `v`, are column-oriented for the block version.
1726: All the nonzeros in `row` must be provided
1728: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED; usually `MatSetValues()` is used instead.
1730: `row` must belong to this process
1732: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1733: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1734: @*/
1735: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1736: {
1737: PetscFunctionBeginHot;
1740: MatCheckPreallocated(mat, 1);
1741: PetscAssertPointer(v, 3);
1742: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1743: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1744: mat->insertmode = INSERT_VALUES;
1746: if (mat->assembled) {
1747: mat->was_assembled = PETSC_TRUE;
1748: mat->assembled = PETSC_FALSE;
1749: }
1750: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1751: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1752: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1753: PetscFunctionReturn(PETSC_SUCCESS);
1754: }
1756: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1757: /*@
1758: MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1759: using structured grid indexing
1761: Not Collective
1763: Input Parameters:
1764: + mat - the matrix
1765: . m - number of rows being entered
1766: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1767: . n - number of columns being entered
1768: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1769: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1770: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1771: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1773: Level: beginner
1775: Notes:
1776: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1778: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1779: options cannot be mixed without intervening calls to the assembly
1780: routines.
1782: The grid coordinates are across the entire grid, not just the local portion
1784: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1785: as well as in C.
1787: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1789: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1790: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1792: The columns and rows in the stencil passed in MUST be contained within the
1793: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1794: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1795: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1796: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1798: For periodic boundary conditions use negative indices for values to the left (below 0), which are
1799: obtained by wrapping values from the right edge. For values to the right of the last entry use that index plus one,
1800: etc., to obtain values that wrap around from the left edge. This only works for the
1801: `DM_BOUNDARY_PERIODIC` boundary type.
1803: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1804: a single value per point) you can skip filling those indices.
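   A minimal sketch for a 2d grid with one degree of freedom per point, assuming the matrix was obtained
   with `DMCreateMatrix()` from a `DMDA` and that i,j index a locally owned grid point:
.vb
   MatStencil  row, col[3];
   PetscScalar vals[3];

   row.i = i; row.j = j;                            /* only the i and j fields are needed for 2d with one dof */
   col[0].i = i - 1; col[0].j = j; vals[0] = -1.0;
   col[1].i = i;     col[1].j = j; vals[1] =  2.0;
   col[2].i = i + 1; col[2].j = j; vals[2] = -1.0;
   PetscCall(MatSetValuesStencil(mat, 1, &row, 3, col, vals, INSERT_VALUES));
.ve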
1806: Inspired by the structured grid interface to the HYPRE package
1807: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1809: Fortran Note:
1810: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1812: Efficiency Alert:
1813: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1814: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1816: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1817: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1818: @*/
1819: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1820: {
1821: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1822: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1823: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1825: PetscFunctionBegin;
1826: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1829: PetscAssertPointer(idxm, 3);
1830: PetscAssertPointer(idxn, 5);
1832: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1833: jdxm = buf;
1834: jdxn = buf + m;
1835: } else {
1836: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1837: jdxm = bufm;
1838: jdxn = bufn;
1839: }
1840: for (i = 0; i < m; i++) {
1841: for (j = 0; j < 3 - sdim; j++) dxm++;
1842: tmp = *dxm++ - starts[0];
1843: for (j = 0; j < dim - 1; j++) {
1844: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1845: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1846: }
1847: if (mat->stencil.noc) dxm++;
1848: jdxm[i] = tmp;
1849: }
1850: for (i = 0; i < n; i++) {
1851: for (j = 0; j < 3 - sdim; j++) dxn++;
1852: tmp = *dxn++ - starts[0];
1853: for (j = 0; j < dim - 1; j++) {
1854: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1855: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1856: }
1857: if (mat->stencil.noc) dxn++;
1858: jdxn[i] = tmp;
1859: }
1860: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1861: PetscCall(PetscFree2(bufm, bufn));
1862: PetscFunctionReturn(PETSC_SUCCESS);
1863: }
1865: /*@
1866: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1867: using structured grid indexing
1869: Not Collective
1871: Input Parameters:
1872: + mat - the matrix
1873: . m - number of rows being entered
1874: . idxm - grid coordinates for matrix rows being entered
1875: . n - number of columns being entered
1876: . idxn - grid coordinates for matrix columns being entered
1877: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1878: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1879: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1881: Level: beginner
1883: Notes:
1884: By default the values, `v`, are row-oriented and unsorted.
1885: See `MatSetOption()` for other options.
1887: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1888: options cannot be mixed without intervening calls to the assembly
1889: routines.
1891: The grid coordinates are across the entire grid, not just the local portion
1893: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1894: as well as in C.
1896: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1898: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1899: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1901: The columns and rows in the stencil passed in MUST be contained within the
1902: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1903: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1904: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1905: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1907: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1908: simply ignored. This allows easily inserting element stiffness matrices
1909: with homogeneous Dirichlet boundary conditions that you don't want represented
1910: in the matrix.
1912: Inspired by the structured grid interface to the HYPRE package
1913: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1915: Fortran Notes:
1916: `idxm` and `idxn` should be declared as
1917: .vb
1918: MatStencil idxm(4,m),idxn(4,n)
1919: .ve
1920: and the values inserted using
1921: .vb
1922: idxm(MatStencil_i,1) = i
1923: idxm(MatStencil_j,1) = j
1924: idxm(MatStencil_k,1) = k
1925: etc
1926: .ve
1928: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1930: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1931: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1932: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1933: @*/
1934: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1935: {
1936: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1937: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1938: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1940: PetscFunctionBegin;
1941: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1944: PetscAssertPointer(idxm, 3);
1945: PetscAssertPointer(idxn, 5);
1946: PetscAssertPointer(v, 6);
1948: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1949: jdxm = buf;
1950: jdxn = buf + m;
1951: } else {
1952: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1953: jdxm = bufm;
1954: jdxn = bufn;
1955: }
1956: for (i = 0; i < m; i++) {
1957: for (j = 0; j < 3 - sdim; j++) dxm++;
1958: tmp = *dxm++ - starts[0];
1959: for (j = 0; j < sdim - 1; j++) {
1960: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1961: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1962: }
1963: dxm++;
1964: jdxm[i] = tmp;
1965: }
1966: for (i = 0; i < n; i++) {
1967: for (j = 0; j < 3 - sdim; j++) dxn++;
1968: tmp = *dxn++ - starts[0];
1969: for (j = 0; j < sdim - 1; j++) {
1970: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1971: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1972: }
1973: dxn++;
1974: jdxn[i] = tmp;
1975: }
1976: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1977: PetscCall(PetscFree2(bufm, bufn));
1978: PetscFunctionReturn(PETSC_SUCCESS);
1979: }
1981: /*@
1982: MatSetStencil - Sets the grid information for setting values into a matrix via
1983: `MatSetValuesStencil()`
1985: Not Collective
1987: Input Parameters:
1988: + mat - the matrix
1989: . dim - dimension of the grid (1, 2, or 3)
1990: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1991: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1992: - dof - number of degrees of freedom per node
1994: Level: beginner
1996: Notes:
1997: Inspired by the structured grid interface to the HYPRE package
1998: (www.llnl.gov/CASC/hypre)
2000: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
2001: user.
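   A minimal sketch for a 2d grid; the ghosted sizes and starts (here gxm, gym, gxs, gys) are illustrative
   and would typically come from `DMDAGetGhostCorners()`:
.vb
   PetscInt dims[2]   = {gxm, gym};
   PetscInt starts[2] = {gxs, gys};

   PetscCall(MatSetStencil(mat, 2, dims, starts, 1));
.ve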
2003: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
2004: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
2005: @*/
2006: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
2007: {
2008: PetscFunctionBegin;
2010: PetscAssertPointer(dims, 3);
2011: PetscAssertPointer(starts, 4);
2013: mat->stencil.dim = dim + (dof > 1);
2014: for (PetscInt i = 0; i < dim; i++) {
2015: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
2016: mat->stencil.starts[i] = starts[dim - i - 1];
2017: }
2018: mat->stencil.dims[dim] = dof;
2019: mat->stencil.starts[dim] = 0;
2020: mat->stencil.noc = (PetscBool)(dof == 1);
2021: PetscFunctionReturn(PETSC_SUCCESS);
2022: }
2024: /*@
2025: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
2027: Not Collective
2029: Input Parameters:
2030: + mat - the matrix
2031: . m - the number of block rows
2032: . idxm - the global block indices
2033: . n - the number of block columns
2034: . idxn - the global block indices
2035: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2036: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2037: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
2039: Level: intermediate
2041: Notes:
2042: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
2043: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
2045: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
2046: NOT the total number of rows/columns; for example, if the block size is 2 and
2047: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
2048: The values in `idxm` would be 1 and 2; that is, the first row index of each block divided by
2049: the block size.
2051: You must call `MatSetBlockSize()` when constructing this matrix (before
2052: preallocating it).
2054: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2056: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
2057: options cannot be mixed without intervening calls to the assembly
2058: routines.
2060: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
2061: as well as in C.
2063: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
2064: simply ignored. This allows easily inserting element stiffness matrices
2065: with homogeneous Dirichlet boundary conditions that you don't want represented
2066: in the matrix.
2068: Each time an entry is set within a sparse matrix via `MatSetValues()`,
2069: internal searching must be done to determine where to place the
2070: data in the matrix storage space. By instead inserting blocks of
2071: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2072: reduced.
2074: Example:
2075: .vb
2076: Suppose m=n=2 and block size (bs) = 2. The array is
2078: 1 2 | 3 4
2079: 5 6 | 7 8
2080: - - - | - - -
2081: 9 10 | 11 12
2082: 13 14 | 15 16
2084: v[] should be passed in like
2085: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
2087: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2088: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2089: .ve
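   As a concrete C sketch of the example above (block rows 1 and 2; the block column indices are chosen for
   illustration, and `mat` is assumed to be a preallocated matrix with block size 2):
.vb
   PetscInt    idxm[2] = {1, 2}, idxn[2] = {1, 2};
   PetscScalar v[16]   = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

   PetscCall(MatSetValuesBlocked(mat, 2, idxm, 2, idxn, v, INSERT_VALUES));
.ve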
2091: Fortran Notes:
2092: If any of `idmx`, `idxn`, and `v` are scalars pass them using, for example,
2093: .vb
2094: call MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
2095: .ve
2097: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2099: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2100: @*/
2101: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
2102: {
2103: PetscFunctionBeginHot;
2106: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2107: PetscAssertPointer(idxm, 3);
2108: PetscAssertPointer(idxn, 5);
2109: MatCheckPreallocated(mat, 1);
2110: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2111: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2112: if (PetscDefined(USE_DEBUG)) {
2113: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2114: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2115: }
2116: if (PetscDefined(USE_DEBUG)) {
2117: PetscInt rbs, cbs, M, N, i;
2118: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2119: PetscCall(MatGetSize(mat, &M, &N));
2120: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
2121: for (i = 0; i < n; i++)
2122: PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
2123: }
2124: if (mat->assembled) {
2125: mat->was_assembled = PETSC_TRUE;
2126: mat->assembled = PETSC_FALSE;
2127: }
2128: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2129: if (mat->ops->setvaluesblocked) {
2130: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2131: } else {
2132: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2133: PetscInt i, j, bs, cbs;
2135: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2136: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2137: iidxm = buf;
2138: iidxn = buf + m * bs;
2139: } else {
2140: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2141: iidxm = bufr;
2142: iidxn = bufc;
2143: }
2144: for (i = 0; i < m; i++) {
2145: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2146: }
2147: if (m != n || bs != cbs || idxm != idxn) {
2148: for (i = 0; i < n; i++) {
2149: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2150: }
2151: } else iidxn = iidxm;
2152: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2153: PetscCall(PetscFree2(bufr, bufc));
2154: }
2155: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2156: PetscFunctionReturn(PETSC_SUCCESS);
2157: }
2159: /*@
2160: MatGetValues - Gets a block of local values from a matrix.
2162: Not Collective; can only return values that are owned by the given process
2164: Input Parameters:
2165: + mat - the matrix
2166: . v - a logically two-dimensional array for storing the values
2167: . m - the number of rows
2168: . idxm - the global indices of the rows
2169: . n - the number of columns
2170: - idxn - the global indices of the columns
2172: Level: advanced
2174: Notes:
2175: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2176: The values, `v`, are then returned in a row-oriented format,
2177: analogous to that used by default in `MatSetValues()`.
2179: `MatGetValues()` uses 0-based row and column numbers in
2180: Fortran as well as in C.
2182: `MatGetValues()` requires that the matrix has been assembled
2183: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2184: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2185: without intermediate matrix assembly.
2187: Negative row or column indices will be ignored and those locations in `v` will be
2188: left unchanged.
2190: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2191: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2192: from `MatGetOwnershipRange`(mat,&rstart,&rend).
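   A minimal sketch that retrieves a 2x2 block of values from the first two locally owned rows, assuming the
   matrix has been assembled and has at least two local rows and two global columns:
.vb
   PetscInt    rstart, rend, rows[2], cols[2] = {0, 1};
   PetscScalar vals[4];

   PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
   rows[0] = rstart; rows[1] = rstart + 1;
   PetscCall(MatGetValues(mat, 2, rows, 2, cols, vals));   /* vals is returned in row-oriented order */
.ve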
2194: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2195: @*/
2196: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2197: {
2198: PetscFunctionBegin;
2201: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2202: PetscAssertPointer(idxm, 3);
2203: PetscAssertPointer(idxn, 5);
2204: PetscAssertPointer(v, 6);
2205: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2206: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2207: MatCheckPreallocated(mat, 1);
2209: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2210: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2211: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2212: PetscFunctionReturn(PETSC_SUCCESS);
2213: }
2215: /*@
2216: MatGetValuesLocal - Retrieves values from certain locations in a matrix using the local numbering of the indices
2217: defined previously by `MatSetLocalToGlobalMapping()`
2219: Not Collective
2221: Input Parameters:
2222: + mat - the matrix
2223: . nrow - number of rows
2224: . irow - the row local indices
2225: . ncol - number of columns
2226: - icol - the column local indices
2228: Output Parameter:
2229: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2230: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2232: Level: advanced
2234: Notes:
2235: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2237: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2238: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2239: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2240: with `MatSetLocalToGlobalMapping()`.
2242: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2243: `MatSetValuesLocal()`, `MatGetValues()`
2244: @*/
2245: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2246: {
2247: PetscFunctionBeginHot;
2250: MatCheckPreallocated(mat, 1);
2251: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2252: PetscAssertPointer(irow, 3);
2253: PetscAssertPointer(icol, 5);
2254: if (PetscDefined(USE_DEBUG)) {
2255: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2256: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2257: }
2258: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2259: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2260: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2261: else {
2262: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2263: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2264: irowm = buf;
2265: icolm = buf + nrow;
2266: } else {
2267: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2268: irowm = bufr;
2269: icolm = bufc;
2270: }
2271: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2272: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2273: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2274: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2275: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2276: PetscCall(PetscFree2(bufr, bufc));
2277: }
2278: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2279: PetscFunctionReturn(PETSC_SUCCESS);
2280: }
2282: /*@
2283: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2284: the same size. Currently, this can only be called once and creates the given matrix.
2286: Not Collective
2288: Input Parameters:
2289: + mat - the matrix
2290: . nb - the number of blocks
2291: . bs - the number of rows (and columns) in each block
2292: . rows - a concatenation of the rows for each block
2293: - v - a concatenation of logically two-dimensional arrays of values
2295: Level: advanced
2297: Notes:
2298: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2300: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
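   A minimal sketch of the calling sequence with two 2x2 blocks (the row indices and values are illustrative):
.vb
   PetscInt    rows[4] = {0, 1, 2, 3};                 /* rows of block 0 followed by rows of block 1 */
   PetscScalar v[8]    = {1, 2, 3, 4, 5, 6, 7, 8};     /* 2x2 values of block 0 followed by block 1 */

   PetscCall(MatSetValuesBatch(mat, 2, 2, rows, v));
.ve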
2302: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2303: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2304: @*/
2305: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2306: {
2307: PetscFunctionBegin;
2310: PetscAssertPointer(rows, 4);
2311: PetscAssertPointer(v, 5);
2312: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2314: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2315: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2316: else {
2317: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2318: }
2319: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2320: PetscFunctionReturn(PETSC_SUCCESS);
2321: }
2323: /*@
2324: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2325: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2326: using a local (per-processor) numbering.
2328: Not Collective
2330: Input Parameters:
2331: + x - the matrix
2332: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2333: - cmapping - column mapping
2335: Level: intermediate
2337: Note:
2338: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
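   A minimal sketch that builds a mapping from an array of global indices and attaches it to both the rows and
   the columns; the array `globalidx` of length `nlocal` is illustrative:
.vb
   ISLocalToGlobalMapping l2g;

   PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD, 1, nlocal, globalidx, PETSC_COPY_VALUES, &l2g));
   PetscCall(MatSetLocalToGlobalMapping(x, l2g, l2g));
   PetscCall(ISLocalToGlobalMappingDestroy(&l2g));
.ve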
2340: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2341: @*/
2342: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2343: {
2344: PetscFunctionBegin;
2349: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2350: else {
2351: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2352: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2353: }
2354: PetscFunctionReturn(PETSC_SUCCESS);
2355: }
2357: /*@
2358: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2360: Not Collective
2362: Input Parameter:
2363: . A - the matrix
2365: Output Parameters:
2366: + rmapping - row mapping
2367: - cmapping - column mapping
2369: Level: advanced
2371: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2372: @*/
2373: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2374: {
2375: PetscFunctionBegin;
2378: if (rmapping) {
2379: PetscAssertPointer(rmapping, 2);
2380: *rmapping = A->rmap->mapping;
2381: }
2382: if (cmapping) {
2383: PetscAssertPointer(cmapping, 3);
2384: *cmapping = A->cmap->mapping;
2385: }
2386: PetscFunctionReturn(PETSC_SUCCESS);
2387: }
2389: /*@
2390: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2392: Logically Collective
2394: Input Parameters:
2395: + A - the matrix
2396: . rmap - row layout
2397: - cmap - column layout
2399: Level: advanced
2401: Note:
2402: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2404: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2405: @*/
2406: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2407: {
2408: PetscFunctionBegin;
2410: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2411: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2412: PetscFunctionReturn(PETSC_SUCCESS);
2413: }
2415: /*@
2416: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2418: Not Collective
2420: Input Parameter:
2421: . A - the matrix
2423: Output Parameters:
2424: + rmap - row layout
2425: - cmap - column layout
2427: Level: advanced
2429: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2430: @*/
2431: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2432: {
2433: PetscFunctionBegin;
2436: if (rmap) {
2437: PetscAssertPointer(rmap, 2);
2438: *rmap = A->rmap;
2439: }
2440: if (cmap) {
2441: PetscAssertPointer(cmap, 3);
2442: *cmap = A->cmap;
2443: }
2444: PetscFunctionReturn(PETSC_SUCCESS);
2445: }
2447: /*@
2448: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2449: using a local numbering of the rows and columns.
2451: Not Collective
2453: Input Parameters:
2454: + mat - the matrix
2455: . nrow - number of rows
2456: . irow - the row local indices
2457: . ncol - number of columns
2458: . icol - the column local indices
2459: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2460: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2461: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2463: Level: intermediate
2465: Notes:
2466: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2468: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2469: options cannot be mixed without intervening calls to the assembly
2470: routines.
2472: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2473: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
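   A minimal sketch (assuming the local-to-global mapping has been set and that the local indices refer to
   entries this MPI process may set):
.vb
   PetscInt    lrow[1] = {0}, lcol[2] = {0, 1};
   PetscScalar vals[2] = {4.0, -1.0};

   PetscCall(MatSetValuesLocal(mat, 1, lrow, 2, lcol, vals, ADD_VALUES));
.ve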
2475: Fortran Notes:
2476: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2477: .vb
2478: call MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2479: .ve
2481: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2483: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2484: `MatGetValuesLocal()`
2485: @*/
2486: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2487: {
2488: PetscFunctionBeginHot;
2491: MatCheckPreallocated(mat, 1);
2492: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2493: PetscAssertPointer(irow, 3);
2494: PetscAssertPointer(icol, 5);
2495: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2496: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2497: if (PetscDefined(USE_DEBUG)) {
2498: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2499: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2500: }
2502: if (mat->assembled) {
2503: mat->was_assembled = PETSC_TRUE;
2504: mat->assembled = PETSC_FALSE;
2505: }
2506: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2507: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2508: else {
2509: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2510: const PetscInt *irowm, *icolm;
2512: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2513: bufr = buf;
2514: bufc = buf + nrow;
2515: irowm = bufr;
2516: icolm = bufc;
2517: } else {
2518: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2519: irowm = bufr;
2520: icolm = bufc;
2521: }
2522: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2523: else irowm = irow;
2524: if (mat->cmap->mapping) {
2525: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2526: else icolm = irowm;
2527: } else icolm = icol;
2528: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2529: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2530: }
2531: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2532: PetscFunctionReturn(PETSC_SUCCESS);
2533: }
2535: /*@
2536: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2537: using a local ordering of the nodes, a block at a time.
2539: Not Collective
2541: Input Parameters:
2542: + mat - the matrix
2543: . nrow - number of rows
2544: . irow - the row local indices
2545: . ncol - number of columns
2546: . icol - the column local indices
2547: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2548: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2549: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2551: Level: intermediate
2553: Notes:
2554: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2555: before using this routine.
2557: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2558: options cannot be mixed without intervening calls to the assembly
2559: routines.
2561: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2562: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2564: Fortran Notes:
2565: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2566: .vb
2567: call MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2568: .ve
2570: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2572: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2573: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2574: @*/
2575: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2576: {
2577: PetscFunctionBeginHot;
2580: MatCheckPreallocated(mat, 1);
2581: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2582: PetscAssertPointer(irow, 3);
2583: PetscAssertPointer(icol, 5);
2584: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2585: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2586: if (PetscDefined(USE_DEBUG)) {
2587: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2588: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2589: }
2591: if (mat->assembled) {
2592: mat->was_assembled = PETSC_TRUE;
2593: mat->assembled = PETSC_FALSE;
2594: }
2595: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2596: PetscInt irbs, rbs;
2597: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2598: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2599: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2600: }
2601: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2602: PetscInt icbs, cbs;
2603: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2604: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2605: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2606: }
2607: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2608: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2609: else {
2610: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2611: const PetscInt *irowm, *icolm;
2613: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2614: bufr = buf;
2615: bufc = buf + nrow;
2616: irowm = bufr;
2617: icolm = bufc;
2618: } else {
2619: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2620: irowm = bufr;
2621: icolm = bufc;
2622: }
2623: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2624: else irowm = irow;
2625: if (mat->cmap->mapping) {
2626: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2627: else icolm = irowm;
2628: } else icolm = icol;
2629: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2630: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2631: }
2632: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2633: PetscFunctionReturn(PETSC_SUCCESS);
2634: }
2636: /*@
2637: MatMultDiagonalBlock - Computes the matrix-vector product, $y = Dx$, where `D` is defined by the inode or block structure of the diagonal
2639: Collective
2641: Input Parameters:
2642: + mat - the matrix
2643: - x - the vector to be multiplied
2645: Output Parameter:
2646: . y - the result
2648: Level: developer
2650: Note:
2651: The vectors `x` and `y` cannot be the same. I.e., one cannot
2652: call `MatMultDiagonalBlock`(A,y,y).
2654: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2655: @*/
2656: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2657: {
2658: PetscFunctionBegin;
2664: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2665: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2666: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2667: MatCheckPreallocated(mat, 1);
2669: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2670: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2671: PetscFunctionReturn(PETSC_SUCCESS);
2672: }
2674: /*@
2675: MatMult - Computes the matrix-vector product, $y = Ax$.
2677: Neighbor-wise Collective
2679: Input Parameters:
2680: + mat - the matrix
2681: - x - the vector to be multiplied
2683: Output Parameter:
2684: . y - the result
2686: Level: beginner
2688: Note:
2689: The vectors `x` and `y` cannot be the same. I.e., one cannot
2690: call `MatMult`(A,y,y).
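   A minimal sketch (assuming `mat` has been assembled):
.vb
   Vec x, y;

   PetscCall(MatCreateVecs(mat, &x, &y));   /* x conforms to the columns of mat, y to its rows */
   PetscCall(VecSet(x, 1.0));
   PetscCall(MatMult(mat, x, y));           /* y now holds the row sums of mat */
   PetscCall(VecDestroy(&x));
   PetscCall(VecDestroy(&y));
.ve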
2692: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2693: @*/
2694: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2695: {
2696: PetscFunctionBegin;
2700: VecCheckAssembled(x);
2702: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2703: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2704: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2705: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2706: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2707: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2708: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2709: PetscCall(VecSetErrorIfLocked(y, 3));
2710: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2711: MatCheckPreallocated(mat, 1);
2713: PetscCall(VecLockReadPush(x));
2714: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2715: PetscUseTypeMethod(mat, mult, x, y);
2716: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2717: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2718: PetscCall(VecLockReadPop(x));
2719: PetscFunctionReturn(PETSC_SUCCESS);
2720: }
2722: /*@
2723: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2725: Neighbor-wise Collective
2727: Input Parameters:
2728: + mat - the matrix
2729: - x - the vector to be multiplied
2731: Output Parameter:
2732: . y - the result
2734: Level: beginner
2736: Notes:
2737: The vectors `x` and `y` cannot be the same. I.e., one cannot
2738: call `MatMultTranspose`(A,y,y).
2740: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose product;
2741: for that use `MatMultHermitianTranspose()`
2743: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2744: @*/
2745: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2746: {
2747: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2749: PetscFunctionBegin;
2753: VecCheckAssembled(x);
2756: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2757: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2758: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2759: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2760: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2761: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2762: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2763: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2764: MatCheckPreallocated(mat, 1);
2766: if (!mat->ops->multtranspose) {
2767: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2768: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2769: } else op = mat->ops->multtranspose;
2770: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2771: PetscCall(VecLockReadPush(x));
2772: PetscCall((*op)(mat, x, y));
2773: PetscCall(VecLockReadPop(x));
2774: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2775: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2776: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2777: PetscFunctionReturn(PETSC_SUCCESS);
2778: }
2780: /*@
2781: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2783: Neighbor-wise Collective
2785: Input Parameters:
2786: + mat - the matrix
2787: - x - the vector to be multiplied
2789: Output Parameter:
2790: . y - the result
2792: Level: beginner
2794: Notes:
2795: The vectors `x` and `y` cannot be the same. I.e., one cannot
2796: call `MatMultHermitianTranspose`(A,y,y).
2798: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2800: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
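   Example Usage:
   A minimal sketch (assuming `A` is an assembled, complex-valued `Mat` created elsewhere; error checking omitted),
.vb
   Vec x, y;
   MatCreateVecs(A, &y, &x); // x matches the row layout of A, y its column layout
   // ... fill x ...
   MatMultHermitianTranspose(A, x, y); // y = A^H x (conjugate transpose)
   VecDestroy(&x);
   VecDestroy(&y);
.ve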
2802: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2803: @*/
2804: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2805: {
2806: PetscFunctionBegin;
2812: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2813: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2814: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2815: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2816: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2817: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2818: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2819: MatCheckPreallocated(mat, 1);
2821: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2822: #if defined(PETSC_USE_COMPLEX)
2823: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2824: PetscCall(VecLockReadPush(x));
2825: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2826: else PetscUseTypeMethod(mat, mult, x, y);
2827: PetscCall(VecLockReadPop(x));
2828: } else {
2829: Vec w;
2830: PetscCall(VecDuplicate(x, &w));
2831: PetscCall(VecCopy(x, w));
2832: PetscCall(VecConjugate(w));
2833: PetscCall(MatMultTranspose(mat, w, y));
2834: PetscCall(VecDestroy(&w));
2835: PetscCall(VecConjugate(y));
2836: }
2837: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2838: #else
2839: PetscCall(MatMultTranspose(mat, x, y));
2840: #endif
2841: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2842: PetscFunctionReturn(PETSC_SUCCESS);
2843: }
2845: /*@
2846: MatMultAdd - Computes $v3 = v2 + A * v1$.
2848: Neighbor-wise Collective
2850: Input Parameters:
2851: + mat - the matrix
2852: . v1 - the vector to be multiplied by `mat`
2853: - v2 - the vector to be added to the result
2855: Output Parameter:
2856: . v3 - the result
2858: Level: beginner
2860: Note:
2861: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2862: call `MatMultAdd`(A,v1,v2,v1).
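   Example Usage:
   A minimal sketch (assuming `A` is an assembled `Mat` created elsewhere; error checking omitted),
.vb
   Vec v1, v2, v3;
   MatCreateVecs(A, &v1, &v2); // v1 matches the column layout of A, v2 its row layout
   VecDuplicate(v2, &v3);
   // ... fill v1 and v2 ...
   MatMultAdd(A, v1, v2, v3); // v3 = v2 + A*v1
.ve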
2864: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2865: @*/
2866: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2867: {
2868: PetscFunctionBegin;
2875: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2876: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2877: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2878: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2879: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2880: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2881: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2882: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2883: MatCheckPreallocated(mat, 1);
2885: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2886: PetscCall(VecLockReadPush(v1));
2887: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2888: PetscCall(VecLockReadPop(v1));
2889: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2890: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2891: PetscFunctionReturn(PETSC_SUCCESS);
2892: }
2894: /*@
2895: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2897: Neighbor-wise Collective
2899: Input Parameters:
2900: + mat - the matrix
2901: . v1 - the vector to be multiplied by the transpose of the matrix
2902: - v2 - the vector to be added to the result
2904: Output Parameter:
2905: . v3 - the result
2907: Level: beginner
2909: Note:
2910: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2911: call `MatMultTransposeAdd`(A,v1,v2,v1).
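   Example Usage:
   A minimal sketch (assuming `A` is an assembled `Mat` created elsewhere; error checking omitted),
.vb
   Vec v1, v2, v3;
   MatCreateVecs(A, &v2, &v1); // v1 matches the row layout of A, v2 its column layout
   VecDuplicate(v2, &v3);
   // ... fill v1 and v2 ...
   MatMultTransposeAdd(A, v1, v2, v3); // v3 = v2 + A^T*v1
.ve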
2913: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2914: @*/
2915: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2916: {
2917: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2919: PetscFunctionBegin;
2926: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2927: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2928: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2929: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2930: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2931: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2932: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2933: MatCheckPreallocated(mat, 1);
2935: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2936: PetscCall(VecLockReadPush(v1));
2937: PetscCall((*op)(mat, v1, v2, v3));
2938: PetscCall(VecLockReadPop(v1));
2939: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2940: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2941: PetscFunctionReturn(PETSC_SUCCESS);
2942: }
2944: /*@
2945: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2947: Neighbor-wise Collective
2949: Input Parameters:
2950: + mat - the matrix
2951: . v1 - the vector to be multiplied by the Hermitian transpose
2952: - v2 - the vector to be added to the result
2954: Output Parameter:
2955: . v3 - the result
2957: Level: beginner
2959: Note:
2960: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2961: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
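   Example Usage:
   The calling sequence mirrors `MatMultTransposeAdd()` (sketch; `A`, `v1`, `v2`, and `v3` are assumed to have been created elsewhere with compatible layouts),
.vb
   MatMultHermitianTransposeAdd(A, v1, v2, v3); // v3 = v2 + A^H*v1
.ve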
2963: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2964: @*/
2965: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2966: {
2967: PetscFunctionBegin;
2974: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2975: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2976: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2977: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2978: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2979: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2980: MatCheckPreallocated(mat, 1);
2982: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2983: PetscCall(VecLockReadPush(v1));
2984: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2985: else {
2986: Vec w, z;
2987: PetscCall(VecDuplicate(v1, &w));
2988: PetscCall(VecCopy(v1, w));
2989: PetscCall(VecConjugate(w));
2990: PetscCall(VecDuplicate(v3, &z));
2991: PetscCall(MatMultTranspose(mat, w, z));
2992: PetscCall(VecDestroy(&w));
2993: PetscCall(VecConjugate(z));
2994: if (v2 != v3) {
2995: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2996: } else {
2997: PetscCall(VecAXPY(v3, 1.0, z));
2998: }
2999: PetscCall(VecDestroy(&z));
3000: }
3001: PetscCall(VecLockReadPop(v1));
3002: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
3003: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
3004: PetscFunctionReturn(PETSC_SUCCESS);
3005: }
3007: /*@
3008:   MatGetFactorType - gets the type of factorization a matrix represents
3010: Not Collective
3012: Input Parameter:
3013: . mat - the matrix
3015: Output Parameter:
3016: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3018: Level: intermediate
3020: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3021: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3022: @*/
3023: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
3024: {
3025: PetscFunctionBegin;
3028: PetscAssertPointer(t, 2);
3029: *t = mat->factortype;
3030: PetscFunctionReturn(PETSC_SUCCESS);
3031: }
3033: /*@
3034:   MatSetFactorType - sets the type of factorization a matrix represents
3036: Logically Collective
3038: Input Parameters:
3039: + mat - the matrix
3040: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3042: Level: intermediate
3044: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3045: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3046: @*/
3047: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
3048: {
3049: PetscFunctionBegin;
3052: mat->factortype = t;
3053: PetscFunctionReturn(PETSC_SUCCESS);
3054: }
3056: /*@
3057: MatGetInfo - Returns information about matrix storage (number of
3058: nonzeros, memory, etc.).
3060: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3062: Input Parameters:
3063: + mat - the matrix
3064: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3066: Output Parameter:
3067: . info - matrix information context
3069: Options Database Key:
3070: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3072: Level: intermediate
3074: Notes:
3075: The `MatInfo` context contains a variety of matrix data, including
3076: number of nonzeros allocated and used, number of mallocs during
3077: matrix assembly, etc. Additional information for factored matrices
3078: is provided (such as the fill ratio, number of mallocs during
3079: factorization, etc.).
3081: Example:
3082: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3083: data within the `MatInfo` context. For example,
3084: .vb
3085: MatInfo info;
3086: Mat A;
3087: double mal, nz_a, nz_u;
3089: MatGetInfo(A, MAT_LOCAL, &info);
3090: mal = info.mallocs;
3091: nz_a = info.nz_allocated;
3092: .ve
3094: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3095: @*/
3096: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3097: {
3098: PetscFunctionBegin;
3101: PetscAssertPointer(info, 3);
3102: MatCheckPreallocated(mat, 1);
3103: PetscUseTypeMethod(mat, getinfo, flag, info);
3104: PetscFunctionReturn(PETSC_SUCCESS);
3105: }
3107: /*
3108: This is used by external packages where it is not easy to get the info from the actual
3109: matrix factorization.
3110: */
3111: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3112: {
3113: PetscFunctionBegin;
3114: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3115: PetscFunctionReturn(PETSC_SUCCESS);
3116: }
3118: /*@
3119:   MatLUFactor - Performs in-place LU factorization of a matrix.
3121: Collective
3123: Input Parameters:
3124: + mat - the matrix
3125: . row - row permutation
3126: . col - column permutation
3127: - info - options for factorization, includes
3128: .vb
3129: fill - expected fill as ratio of original fill.
3130: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3131: Run with the option -info to determine an optimal value to use
3132: .ve
3134: Level: developer
3136: Notes:
3137: Most users should employ the `KSP` interface for linear solvers
3138: instead of working directly with matrix algebra routines such as this.
3139: See, e.g., `KSPCreate()`.
3141: This changes the state of the matrix to a factored matrix; it cannot be used
3142: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3144: This is really in-place only for dense matrices, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3145: when not using `KSP`.
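   Example Usage:
   A minimal in-place sketch (assuming `A` is a square, assembled matrix and `b`, `x` are compatible vectors created elsewhere; error checking omitted),
.vb
   IS rowperm, colperm;
   MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm);
   MatLUFactor(A, rowperm, colperm, NULL); // A now holds its LU factors
   MatSolve(A, b, x);                      // solve A x = b with the in-place factors
   ISDestroy(&rowperm);
   ISDestroy(&colperm);
.ve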
3147: Fortran Note:
3148: A valid (non-null) `info` argument must be provided
3150: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3151: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3152: @*/
3153: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3154: {
3155: MatFactorInfo tinfo;
3157: PetscFunctionBegin;
3161: if (info) PetscAssertPointer(info, 4);
3163: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3164: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3165: MatCheckPreallocated(mat, 1);
3166: if (!info) {
3167: PetscCall(MatFactorInfoInitialize(&tinfo));
3168: info = &tinfo;
3169: }
3171: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3172: PetscUseTypeMethod(mat, lufactor, row, col, info);
3173: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3174: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3175: PetscFunctionReturn(PETSC_SUCCESS);
3176: }
3178: /*@
3179:   MatILUFactor - Performs in-place ILU factorization of a matrix.
3181: Collective
3183: Input Parameters:
3184: + mat - the matrix
3185: . row - row permutation
3186: . col - column permutation
3187: - info - structure containing
3188: .vb
3189: levels - number of levels of fill.
3190: expected fill - as ratio of original fill.
3191:       1 or 0 - indicating whether to force fill on the diagonal (improves robustness for matrices
3192: missing diagonal entries)
3193: .ve
3195: Level: developer
3197: Notes:
3198: Most users should employ the `KSP` interface for linear solvers
3199: instead of working directly with matrix algebra routines such as this.
3200: See, e.g., `KSPCreate()`.
3202: Probably really in-place only when level of fill is zero, otherwise allocates
3203: new space to store factored matrix and deletes previous memory. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3204: when not using `KSP`.
3206: Fortran Note:
3207: A valid (non-null) `info` argument must be provided
3209: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3210: @*/
3211: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3212: {
3213: PetscFunctionBegin;
3217: PetscAssertPointer(info, 4);
3219: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3220: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3221: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3222: MatCheckPreallocated(mat, 1);
3224: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3225: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3226: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3227: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3228: PetscFunctionReturn(PETSC_SUCCESS);
3229: }
3231: /*@
3232:   MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
3233: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3235: Collective
3237: Input Parameters:
3238: + fact - the factor matrix obtained with `MatGetFactor()`
3239: . mat - the matrix
3240: . row - the row permutation
3241: . col - the column permutation
3242: - info - options for factorization, includes
3243: .vb
3244: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3245: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3246: .ve
3248: Level: developer
3250: Notes:
3251: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3253: Most users should employ the simplified `KSP` interface for linear solvers
3254: instead of working directly with matrix algebra routines such as this.
3255: See, e.g., `KSPCreate()`.
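   Example Usage:
   A minimal sketch of the full out-of-place sequence (assuming `A` is a square, assembled `MATAIJ` matrix and `b`, `x` are compatible vectors; error checking omitted),
.vb
   Mat F;
   IS  rowperm, colperm;
   MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm);
   MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F);
   MatLUFactorSymbolic(F, A, rowperm, colperm, NULL);
   MatLUFactorNumeric(F, A, NULL);
   MatSolve(F, b, x);
   ISDestroy(&rowperm);
   ISDestroy(&colperm);
   MatDestroy(&F);
.ve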
3257: Fortran Note:
3258: A valid (non-null) `info` argument must be provided
3260: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3261: @*/
3262: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3263: {
3264: MatFactorInfo tinfo;
3266: PetscFunctionBegin;
3271: if (info) PetscAssertPointer(info, 5);
3274: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3275: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3276: MatCheckPreallocated(mat, 2);
3277: if (!info) {
3278: PetscCall(MatFactorInfoInitialize(&tinfo));
3279: info = &tinfo;
3280: }
3282: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3283: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3284: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3285: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3286: PetscFunctionReturn(PETSC_SUCCESS);
3287: }
3289: /*@
3290: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3291: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3293: Collective
3295: Input Parameters:
3296: + fact - the factor matrix obtained with `MatGetFactor()`
3297: . mat - the matrix
3298: - info - options for factorization
3300: Level: developer
3302: Notes:
3303: See `MatLUFactor()` for in-place factorization. See
3304: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3306: Most users should employ the `KSP` interface for linear solvers
3307: instead of working directly with matrix algebra routines such as this.
3308: See, e.g., `KSPCreate()`.
3310: Fortran Note:
3311: A valid (non-null) `info` argument must be provided
3313: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3314: @*/
3315: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3316: {
3317: MatFactorInfo tinfo;
3319: PetscFunctionBegin;
3324: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3325: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3326: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3328: MatCheckPreallocated(mat, 2);
3329: if (!info) {
3330: PetscCall(MatFactorInfoInitialize(&tinfo));
3331: info = &tinfo;
3332: }
3334: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3335: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3336: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3337: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3338: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3339: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3340: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3341: PetscFunctionReturn(PETSC_SUCCESS);
3342: }
3344: /*@
3345: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3346: symmetric matrix.
3348: Collective
3350: Input Parameters:
3351: + mat - the matrix
3352: . perm - row and column permutations
3353: - info - expected fill as ratio of original fill
3355: Level: developer
3357: Notes:
3358: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3359: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3361: Most users should employ the `KSP` interface for linear solvers
3362: instead of working directly with matrix algebra routines such as this.
3363: See, e.g., `KSPCreate()`.
3365: Fortran Note:
3366: A valid (non-null) `info` argument must be provided
3368: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`,
3369: `MatGetOrdering()`
3370: @*/
3371: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3372: {
3373: MatFactorInfo tinfo;
3375: PetscFunctionBegin;
3378: if (info) PetscAssertPointer(info, 3);
3380: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3381: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3382: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3383: MatCheckPreallocated(mat, 1);
3384: if (!info) {
3385: PetscCall(MatFactorInfoInitialize(&tinfo));
3386: info = &tinfo;
3387: }
3389: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3390: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3391: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3392: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3393: PetscFunctionReturn(PETSC_SUCCESS);
3394: }
3396: /*@
3397: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3398: of a symmetric matrix.
3400: Collective
3402: Input Parameters:
3403: + fact - the factor matrix obtained with `MatGetFactor()`
3404: . mat - the matrix
3405: . perm - row and column permutations
3406: - info - options for factorization, includes
3407: .vb
3408: fill - expected fill as ratio of original fill.
3409: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3410: Run with the option -info to determine an optimal value to use
3411: .ve
3413: Level: developer
3415: Notes:
3416: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3417: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3419: Most users should employ the `KSP` interface for linear solvers
3420: instead of working directly with matrix algebra routines such as this.
3421: See, e.g., `KSPCreate()`.
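   Example Usage:
   A minimal sketch (assuming `A` is a symmetric, assembled matrix and `b`, `x` are compatible vectors; error checking omitted),
.vb
   Mat F;
   IS  perm, iperm;
   MatGetOrdering(A, MATORDERINGNATURAL, &perm, &iperm);
   MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F);
   MatCholeskyFactorSymbolic(F, A, perm, NULL);
   MatCholeskyFactorNumeric(F, A, NULL);
   MatSolve(F, b, x);
   ISDestroy(&perm);
   ISDestroy(&iperm);
   MatDestroy(&F);
.ve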
3423: Fortran Note:
3424: A valid (non-null) `info` argument must be provided
3426: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`,
3427: `MatGetOrdering()`
3428: @*/
3429: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3430: {
3431: MatFactorInfo tinfo;
3433: PetscFunctionBegin;
3437: if (info) PetscAssertPointer(info, 4);
3440: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3441: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3442: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3443: MatCheckPreallocated(mat, 2);
3444: if (!info) {
3445: PetscCall(MatFactorInfoInitialize(&tinfo));
3446: info = &tinfo;
3447: }
3449: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3450: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3451: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3452: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3453: PetscFunctionReturn(PETSC_SUCCESS);
3454: }
3456: /*@
3457: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3458: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3459: `MatCholeskyFactorSymbolic()`.
3461: Collective
3463: Input Parameters:
3464: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3465: . mat - the initial matrix that is to be factored
3466: - info - options for factorization
3468: Level: developer
3470: Note:
3471: Most users should employ the `KSP` interface for linear solvers
3472: instead of working directly with matrix algebra routines such as this.
3473: See, e.g., `KSPCreate()`.
3475: Fortran Note:
3476: A valid (non-null) `info` argument must be provided
3478: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3479: @*/
3480: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3481: {
3482: MatFactorInfo tinfo;
3484: PetscFunctionBegin;
3489: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3490: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3491: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3492: MatCheckPreallocated(mat, 2);
3493: if (!info) {
3494: PetscCall(MatFactorInfoInitialize(&tinfo));
3495: info = &tinfo;
3496: }
3498: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3499: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3500: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3501: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3502: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3503: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3504: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3505: PetscFunctionReturn(PETSC_SUCCESS);
3506: }
3508: /*@
3509:   MatQRFactor - Performs in-place QR factorization of a matrix.
3511: Collective
3513: Input Parameters:
3514: + mat - the matrix
3515: . col - column permutation
3516: - info - options for factorization, includes
3517: .vb
3518: fill - expected fill as ratio of original fill.
3519: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3520: Run with the option -info to determine an optimal value to use
3521: .ve
3523: Level: developer
3525: Notes:
3526: Most users should employ the `KSP` interface for linear solvers
3527: instead of working directly with matrix algebra routines such as this.
3528: See, e.g., `KSPCreate()`.
3530: This changes the state of the matrix to a factored matrix; it cannot be used
3531: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3533: Fortran Note:
3534: A valid (non-null) `info` argument must be provided
3536: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3537: `MatSetUnfactored()`
3538: @*/
3539: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3540: {
3541: PetscFunctionBegin;
3544: if (info) PetscAssertPointer(info, 3);
3546: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3547: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3548: MatCheckPreallocated(mat, 1);
3549: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3550: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3551: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3552: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3553: PetscFunctionReturn(PETSC_SUCCESS);
3554: }
3556: /*@
3557:   MatQRFactorSymbolic - Performs symbolic QR factorization of a matrix.
3558: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3560: Collective
3562: Input Parameters:
3563: + fact - the factor matrix obtained with `MatGetFactor()`
3564: . mat - the matrix
3565: . col - column permutation
3566: - info - options for factorization, includes
3567: .vb
3568: fill - expected fill as ratio of original fill.
3569: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3570: Run with the option -info to determine an optimal value to use
3571: .ve
3573: Level: developer
3575: Note:
3576: Most users should employ the `KSP` interface for linear solvers
3577: instead of working directly with matrix algebra routines such as this.
3578: See, e.g., `KSPCreate()`.
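   Example Usage:
   A minimal sketch (assuming `A` is an assembled `MATSEQDENSE` matrix and `b`, `x` are compatible vectors; here `NULL` is passed for the column permutation and `info`; error checking omitted),
.vb
   Mat F;
   MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_QR, &F);
   MatQRFactorSymbolic(F, A, NULL, NULL);
   MatQRFactorNumeric(F, A, NULL);
   MatSolve(F, b, x); // least-squares solve when A is rectangular
   MatDestroy(&F);
.ve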
3580: Fortran Note:
3581: A valid (non-null) `info` argument must be provided
3583: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3584: @*/
3585: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3586: {
3587: MatFactorInfo tinfo;
3589: PetscFunctionBegin;
3593: if (info) PetscAssertPointer(info, 4);
3596: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3597: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3598: MatCheckPreallocated(mat, 2);
3599: if (!info) {
3600: PetscCall(MatFactorInfoInitialize(&tinfo));
3601: info = &tinfo;
3602: }
3604: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3605: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3606: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3607: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3608: PetscFunctionReturn(PETSC_SUCCESS);
3609: }
3611: /*@
3612: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3613: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3615: Collective
3617: Input Parameters:
3618: + fact - the factor matrix obtained with `MatGetFactor()`
3619: . mat - the matrix
3620: - info - options for factorization
3622: Level: developer
3624: Notes:
3625: See `MatQRFactor()` for in-place factorization.
3627: Most users should employ the `KSP` interface for linear solvers
3628: instead of working directly with matrix algebra routines such as this.
3629: See, e.g., `KSPCreate()`.
3631: Fortran Note:
3632: A valid (non-null) `info` argument must be provided
3634: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3635: @*/
3636: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3637: {
3638: MatFactorInfo tinfo;
3640: PetscFunctionBegin;
3645: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3646: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3647: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3649: MatCheckPreallocated(mat, 2);
3650: if (!info) {
3651: PetscCall(MatFactorInfoInitialize(&tinfo));
3652: info = &tinfo;
3653: }
3655: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3656: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3657: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3658: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3659: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3660: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3661: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3662: PetscFunctionReturn(PETSC_SUCCESS);
3663: }
3665: /*@
3666: MatSolve - Solves $A x = b$, given a factored matrix.
3668: Neighbor-wise Collective
3670: Input Parameters:
3671: + mat - the factored matrix
3672: - b - the right-hand-side vector
3674: Output Parameter:
3675: . x - the result vector
3677: Level: developer
3679: Notes:
3680: The vectors `b` and `x` cannot be the same. I.e., one cannot
3681: call `MatSolve`(A,x,x).
3683: Most users should employ the `KSP` interface for linear solvers
3684: instead of working directly with matrix algebra routines such as this.
3685: See, e.g., `KSPCreate()`.
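   Example Usage:
   A minimal sketch (assuming `F` was obtained with `MatGetFactor()` and factored, e.g. with `MatLUFactorSymbolic()` and `MatLUFactorNumeric()`, and `b`, `b2`, `x`, `x2` are compatible vectors; error checking omitted),
.vb
   MatSolve(F, b, x);   // solve A x = b using the stored factors
   MatSolve(F, b2, x2); // the same factor can be reused for further right-hand sides
.ve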
3687: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3688: @*/
3689: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3690: {
3691: PetscFunctionBegin;
3696: PetscCheckSameComm(mat, 1, b, 2);
3697: PetscCheckSameComm(mat, 1, x, 3);
3698: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3699: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3700: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3701: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3702: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3703: MatCheckPreallocated(mat, 1);
3705: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3706: PetscCall(VecFlag(x, mat->factorerrortype));
3707: if (mat->factorerrortype) PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3708: else PetscUseTypeMethod(mat, solve, b, x);
3709: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3710: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3711: PetscFunctionReturn(PETSC_SUCCESS);
3712: }
3714: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3715: {
3716: Vec b, x;
3717: PetscInt N, i;
3718: PetscErrorCode (*f)(Mat, Vec, Vec);
3719: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3721: PetscFunctionBegin;
3722: if (A->factorerrortype) {
3723: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3724: PetscCall(MatSetInf(X));
3725: PetscFunctionReturn(PETSC_SUCCESS);
3726: }
3727: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3728: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3729: PetscCall(MatBoundToCPU(A, &Abound));
3730: if (!Abound) {
3731: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3732: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3733: }
3734: #if PetscDefined(HAVE_CUDA)
3735: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3736: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3737: #elif PetscDefined(HAVE_HIP)
3738: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3739: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3740: #endif
3741: PetscCall(MatGetSize(B, NULL, &N));
3742: for (i = 0; i < N; i++) {
3743: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3744: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3745: PetscCall((*f)(A, b, x));
3746: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3747: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3748: }
3749: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3750: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3751: PetscFunctionReturn(PETSC_SUCCESS);
3752: }
3754: /*@
3755: MatMatSolve - Solves $A X = B$, given a factored matrix.
3757: Neighbor-wise Collective
3759: Input Parameters:
3760: + A - the factored matrix
3761: - B - the right-hand-side matrix, of type `MATDENSE` (or sparse `MATAIJ` when using MUMPS)
3763: Output Parameter:
3764: . X - the result matrix (dense matrix)
3766: Level: developer
3768: Note:
3769: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3770: otherwise, `B` and `X` cannot be the same.
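   Example Usage:
   A minimal sketch (assuming `F` is a factored matrix from `MatGetFactor()` and `B` is a compatible `MATDENSE` right-hand-side matrix; error checking omitted),
.vb
   Mat X;
   MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X);
   MatMatSolve(F, B, X); // solve A X = B for all columns of B
   MatDestroy(&X);
.ve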
3772: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3773: @*/
3774: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3775: {
3776: PetscFunctionBegin;
3781: PetscCheckSameComm(A, 1, B, 2);
3782: PetscCheckSameComm(A, 1, X, 3);
3783: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3784: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3785: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3786: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3787: MatCheckPreallocated(A, 1);
3789: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3790: if (!A->ops->matsolve) {
3791: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3792: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3793: } else PetscUseTypeMethod(A, matsolve, B, X);
3794: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3795: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3796: PetscFunctionReturn(PETSC_SUCCESS);
3797: }
3799: /*@
3800: MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.
3802: Neighbor-wise Collective
3804: Input Parameters:
3805: + A - the factored matrix
3806: - B - the right-hand-side matrix (`MATDENSE` matrix)
3808: Output Parameter:
3809: . X - the result matrix (dense matrix)
3811: Level: developer
3813: Note:
3814: The matrices `B` and `X` cannot be the same. I.e., one cannot
3815: call `MatMatSolveTranspose`(A,X,X).
3817: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3818: @*/
3819: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3820: {
3821: PetscFunctionBegin;
3826: PetscCheckSameComm(A, 1, B, 2);
3827: PetscCheckSameComm(A, 1, X, 3);
3828: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3829: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3830: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3831: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3832: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3833: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3834: MatCheckPreallocated(A, 1);
3836: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3837: if (!A->ops->matsolvetranspose) {
3838: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3839: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3840: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3841: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3842: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3843: PetscFunctionReturn(PETSC_SUCCESS);
3844: }
3846: /*@
3847: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3849: Neighbor-wise Collective
3851: Input Parameters:
3852: + A - the factored matrix
3853: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3855: Output Parameter:
3856: . X - the result matrix (dense matrix)
3858: Level: developer
3860: Note:
3861:   For MUMPS, only a centralized sparse compressed column format on the host processor is supported for the right-hand-side matrix. The user must create `Bt` in sparse compressed row
3862:   format on the host processor and call `MatMatTransposeSolve()` to implement MUMPS' `MatMatSolve()`.
3864: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3865: @*/
3866: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3867: {
3868: PetscFunctionBegin;
3873: PetscCheckSameComm(A, 1, Bt, 2);
3874: PetscCheckSameComm(A, 1, X, 3);
3876: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3877: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3878: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3879: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as row number of the rhs matrix");
3880: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3881: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3882: MatCheckPreallocated(A, 1);
3884: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3885: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3886: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3887: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3888: PetscFunctionReturn(PETSC_SUCCESS);
3889: }
3891: /*@
3892:   MatForwardSolve - Solves $L x = b$, given a factored matrix, $A = LU$, or
3893:   $U^T D^{1/2} x = b$, given a factored symmetric matrix, $A = U^T D U$.
3895: Neighbor-wise Collective
3897: Input Parameters:
3898: + mat - the factored matrix
3899: - b - the right-hand-side vector
3901: Output Parameter:
3902: . x - the result vector
3904: Level: developer
3906: Notes:
3907: `MatSolve()` should be used for most applications, as it performs
3908: a forward solve followed by a backward solve.
3910: The vectors `b` and `x` cannot be the same, i.e., one cannot
3911: call `MatForwardSolve`(A,x,x).
3913: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3914:   the diagonal blocks are not implemented as $D = D^{1/2} D^{1/2}$ yet.
3915:   `MatForwardSolve()` solves $U^T D y = b$, and
3916: `MatBackwardSolve()` solves $U x = y$.
3917: Thus they do not provide a symmetric preconditioner.
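   Example Usage:
   A forward solve followed by a backward solve is equivalent to `MatSolve()` (sketch, assuming `F` is a factored matrix and `b`, `x`, `y` are compatible vectors created elsewhere; error checking omitted),
.vb
   MatForwardSolve(F, b, y);  // L y = b
   MatBackwardSolve(F, y, x); // U x = y, so x solves A x = b
.ve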
3919: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3920: @*/
3921: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3922: {
3923: PetscFunctionBegin;
3928: PetscCheckSameComm(mat, 1, b, 2);
3929: PetscCheckSameComm(mat, 1, x, 3);
3930: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3931: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3932: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3933: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3934: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3935: MatCheckPreallocated(mat, 1);
3937: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3938: PetscUseTypeMethod(mat, forwardsolve, b, x);
3939: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3940: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3941: PetscFunctionReturn(PETSC_SUCCESS);
3942: }
3944: /*@
3945:   MatBackwardSolve - Solves $U x = b$, given a factored matrix, $A = LU$, or
3946:   $D^{1/2} U x = b$, given a factored symmetric matrix, $A = U^T D U$.
3948: Neighbor-wise Collective
3950: Input Parameters:
3951: + mat - the factored matrix
3952: - b - the right-hand-side vector
3954: Output Parameter:
3955: . x - the result vector
3957: Level: developer
3959: Notes:
3960: `MatSolve()` should be used for most applications, as it performs
3961: a forward solve followed by a backward solve.
3963: The vectors `b` and `x` cannot be the same. I.e., one cannot
3964: call `MatBackwardSolve`(A,x,x).
3966: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3967:   the diagonal blocks are not implemented as $D = D^{1/2} D^{1/2}$ yet.
3968:   `MatForwardSolve()` solves $U^T D y = b$, and
3969: `MatBackwardSolve()` solves $U x = y$.
3970: Thus they do not provide a symmetric preconditioner.
3972: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3973: @*/
3974: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3975: {
3976: PetscFunctionBegin;
3981: PetscCheckSameComm(mat, 1, b, 2);
3982: PetscCheckSameComm(mat, 1, x, 3);
3983: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3984: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3985: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3986: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3987: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3988: MatCheckPreallocated(mat, 1);
3990: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3991: PetscUseTypeMethod(mat, backwardsolve, b, x);
3992: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3993: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3994: PetscFunctionReturn(PETSC_SUCCESS);
3995: }
3997: /*@
3998: MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.
4000: Neighbor-wise Collective
4002: Input Parameters:
4003: + mat - the factored matrix
4004: . b - the right-hand-side vector
4005: - y - the vector to be added to
4007: Output Parameter:
4008: . x - the result vector
4010: Level: developer
4012: Note:
4013: The vectors `b` and `x` cannot be the same. I.e., one cannot
4014: call `MatSolveAdd`(A,x,y,x).
4016: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
4017: @*/
4018: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
4019: {
4020: PetscScalar one = 1.0;
4021: Vec tmp;
4023: PetscFunctionBegin;
4029: PetscCheckSameComm(mat, 1, b, 2);
4030: PetscCheckSameComm(mat, 1, y, 3);
4031: PetscCheckSameComm(mat, 1, x, 4);
4032: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4033: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4034: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4035: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4036: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4037: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4038: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4039: MatCheckPreallocated(mat, 1);
4041: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
4042: PetscCall(VecFlag(x, mat->factorerrortype));
4043: if (mat->factorerrortype) {
4044: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4045: } else if (mat->ops->solveadd) {
4046: PetscUseTypeMethod(mat, solveadd, b, y, x);
4047: } else {
4048: /* do the solve then the add manually */
4049: if (x != y) {
4050: PetscCall(MatSolve(mat, b, x));
4051: PetscCall(VecAXPY(x, one, y));
4052: } else {
4053: PetscCall(VecDuplicate(x, &tmp));
4054: PetscCall(VecCopy(x, tmp));
4055: PetscCall(MatSolve(mat, b, x));
4056: PetscCall(VecAXPY(x, one, tmp));
4057: PetscCall(VecDestroy(&tmp));
4058: }
4059: }
4060: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4061: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4062: PetscFunctionReturn(PETSC_SUCCESS);
4063: }
4065: /*@
4066: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4068: Neighbor-wise Collective
4070: Input Parameters:
4071: + mat - the factored matrix
4072: - b - the right-hand-side vector
4074: Output Parameter:
4075: . x - the result vector
4077: Level: developer
4079: Notes:
4080: The vectors `b` and `x` cannot be the same. I.e., one cannot
4081: call `MatSolveTranspose`(A,x,x).
4083: Most users should employ the `KSP` interface for linear solvers
4084: instead of working directly with matrix algebra routines such as this.
4085: See, e.g., `KSPCreate()`.
4087: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4088: @*/
4089: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4090: {
4091: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4093: PetscFunctionBegin;
4098: PetscCheckSameComm(mat, 1, b, 2);
4099: PetscCheckSameComm(mat, 1, x, 3);
4100: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4101: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4102: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4103: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4104: MatCheckPreallocated(mat, 1);
4105: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4106: PetscCall(VecFlag(x, mat->factorerrortype));
4107: if (mat->factorerrortype) {
4108: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4109: } else {
4110: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4111: PetscCall((*f)(mat, b, x));
4112: }
4113: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4114: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4115: PetscFunctionReturn(PETSC_SUCCESS);
4116: }
4118: /*@
4119:   MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4120:   factored matrix.
4122: Neighbor-wise Collective
4124: Input Parameters:
4125: + mat - the factored matrix
4126: . b - the right-hand-side vector
4127: - y - the vector to be added to
4129: Output Parameter:
4130: . x - the result vector
4132: Level: developer
4134: Note:
4135: The vectors `b` and `x` cannot be the same. I.e., one cannot
4136: call `MatSolveTransposeAdd`(A,x,y,x).
4138: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4139: @*/
4140: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4141: {
4142: PetscScalar one = 1.0;
4143: Vec tmp;
4144: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4146: PetscFunctionBegin;
4152: PetscCheckSameComm(mat, 1, b, 2);
4153: PetscCheckSameComm(mat, 1, y, 3);
4154: PetscCheckSameComm(mat, 1, x, 4);
4155: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4156: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4157: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4158: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4159: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4160: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4161: MatCheckPreallocated(mat, 1);
4163: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4164: PetscCall(VecFlag(x, mat->factorerrortype));
4165: if (mat->factorerrortype) {
4166: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4167: } else if (f) {
4168: PetscCall((*f)(mat, b, y, x));
4169: } else {
4170: /* do the solve then the add manually */
4171: if (x != y) {
4172: PetscCall(MatSolveTranspose(mat, b, x));
4173: PetscCall(VecAXPY(x, one, y));
4174: } else {
4175: PetscCall(VecDuplicate(x, &tmp));
4176: PetscCall(VecCopy(x, tmp));
4177: PetscCall(MatSolveTranspose(mat, b, x));
4178: PetscCall(VecAXPY(x, one, tmp));
4179: PetscCall(VecDestroy(&tmp));
4180: }
4181: }
4182: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4183: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4184: PetscFunctionReturn(PETSC_SUCCESS);
4185: }
4187: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4188: /*@
4189: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4191: Neighbor-wise Collective
4193: Input Parameters:
4194: + mat - the matrix
4195: . b - the right-hand side
4196: . omega - the relaxation factor
4197: . flag - flag indicating the type of SOR (see below)
4198: . shift - diagonal shift
4199: . its - the number of iterations
4200: - lits - the number of local iterations
4202: Output Parameter:
4203: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4205: SOR Flags:
4206: + `SOR_FORWARD_SWEEP` - forward SOR
4207: . `SOR_BACKWARD_SWEEP` - backward SOR
4208: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4209: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4210: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4211: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4212: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4213: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4214: upper/lower triangular part of matrix to
4215: vector (with omega)
4216: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4218: Level: developer
4220: Notes:
4221: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4222: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4223: on each processor.
4225: Application programmers will not generally use `MatSOR()` directly,
4226: but instead will employ the `KSP`/`PC` interface.
4228: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing, otherwise it does a pointwise smoothing
4230: Most users should employ the `KSP` interface for linear solvers
4231: instead of working directly with matrix algebra routines such as this.
4232: See, e.g., `KSPCreate()`.
4234: Vectors `x` and `b` CANNOT be the same
4236: The flags are implemented as bitwise inclusive or operations.
4237: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4238: to specify a zero initial guess for SSOR.
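   Example Usage:
   A minimal sketch applying two SSOR sweeps with a zero initial guess; it assumes `A` is an assembled
   `MATAIJ` matrix and `b`, `x` are compatible vectors (the relaxation factor and shift are illustrative):
.vb
   PetscReal omega = 1.2, shift = 0.0;

   PetscCall(MatSOR(A, b, omega, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), shift, 2, 1, x));
.ve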
4240: Developer Note:
4241: We should add block SOR support for `MATAIJ` matrices with block size set to greater than one and no inodes
4243: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4244: @*/
4245: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4246: {
4247: PetscFunctionBegin;
4252: PetscCheckSameComm(mat, 1, b, 2);
4253: PetscCheckSameComm(mat, 1, x, 8);
4254: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4255: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4256: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4257: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4258: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4259: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4260: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4261: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4263: MatCheckPreallocated(mat, 1);
4264: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4265: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4266: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4267: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4268: PetscFunctionReturn(PETSC_SUCCESS);
4269: }
4271: /*
4272: Default matrix copy routine.
4273: */
4274: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4275: {
4276: PetscInt i, rstart = 0, rend = 0, nz;
4277: const PetscInt *cwork;
4278: const PetscScalar *vwork;
4280: PetscFunctionBegin;
4281: if (B->assembled) PetscCall(MatZeroEntries(B));
4282: if (str == SAME_NONZERO_PATTERN) {
4283: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4284: for (i = rstart; i < rend; i++) {
4285: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4286: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4287: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4288: }
4289: } else {
4290: PetscCall(MatAYPX(B, 0.0, A, str));
4291: }
4292: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4293: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4294: PetscFunctionReturn(PETSC_SUCCESS);
4295: }
4297: /*@
4298: MatCopy - Copies a matrix to another matrix.
4300: Collective
4302: Input Parameters:
4303: + A - the matrix
4304: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4306: Output Parameter:
4307: . B - where the copy is put
4309: Level: intermediate
4311: Notes:
4312: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4314: `MatCopy()` copies the matrix entries of a matrix to another existing
4315: matrix (after first zeroing the second matrix). A related routine is
4316: `MatConvert()`, which first creates a new matrix and then copies the data.
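   Example Usage:
   A minimal sketch that reuses the nonzero structure of `A` for `B` and then copies the values;
   it assumes `A` is an assembled matrix (the variable names are illustrative):
.vb
   Mat B;

   PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B)); /* B gets the structure of A */
   PetscCall(MatCopy(A, B, SAME_NONZERO_PATTERN));         /* B now holds the same entries as A */
.ve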
4318: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4319: @*/
4320: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4321: {
4322: PetscInt i;
4324: PetscFunctionBegin;
4329: PetscCheckSameComm(A, 1, B, 2);
4330: MatCheckPreallocated(B, 2);
4331: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4332: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4333: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4334: A->cmap->N, B->cmap->N);
4335: MatCheckPreallocated(A, 1);
4336: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4338: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4339: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4340: else PetscCall(MatCopy_Basic(A, B, str));
4342: B->stencil.dim = A->stencil.dim;
4343: B->stencil.noc = A->stencil.noc;
4344: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4345: B->stencil.dims[i] = A->stencil.dims[i];
4346: B->stencil.starts[i] = A->stencil.starts[i];
4347: }
4349: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4350: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4351: PetscFunctionReturn(PETSC_SUCCESS);
4352: }
4354: /*@
4355: MatConvert - Converts a matrix to another matrix, either of the same
4356: or different type.
4358: Collective
4360: Input Parameters:
4361: + mat - the matrix
4362: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4363: same type as the original matrix.
4364: - reuse - denotes if the destination matrix is to be created or reused.
4365: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input `Mat` to be changed to contain the matrix in the new format), otherwise use
4366: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4368: Output Parameter:
4369: . M - pointer to place new matrix
4371: Level: intermediate
4373: Notes:
4374: `MatConvert()` first creates a new matrix and then copies the data from
4375: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4376: entries of one matrix to another already existing matrix context.
4378: Cannot be used to convert a sequential matrix to a parallel one, or a parallel matrix to a sequential one,
4379: since the MPI communicator of the generated matrix is always the same as the communicator
4380: of the input matrix.
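   Example Usage:
   A short sketch of an out-of-place and an in-place conversion; it assumes `A` is an assembled matrix
   (the variable names are illustrative):
.vb
   Mat Adense;

   PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense)); /* new dense copy of A */
   PetscCall(MatConvert(A, MATAIJ, MAT_INPLACE_MATRIX, &A));        /* A itself is now of type MATAIJ */
.ve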
4382: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4383: @*/
4384: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4385: {
4386: PetscBool sametype, issame, flg;
4387: PetscBool3 issymmetric, ishermitian;
4388: char convname[256], mtype[256];
4389: Mat B;
4391: PetscFunctionBegin;
4394: PetscAssertPointer(M, 4);
4395: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4396: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4397: MatCheckPreallocated(mat, 1);
4399: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4400: if (flg) newtype = mtype;
4402: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4403: PetscCall(PetscStrcmp(newtype, "same", &issame));
4404: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4405: if (reuse == MAT_REUSE_MATRIX) {
4407: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4408: }
4410: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4411: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4412: PetscFunctionReturn(PETSC_SUCCESS);
4413: }
4415: /* Cache Mat options because some converters use MatHeaderReplace */
4416: issymmetric = mat->symmetric;
4417: ishermitian = mat->hermitian;
4419: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4420: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4421: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4422: } else {
4423: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4424: const char *prefix[3] = {"seq", "mpi", ""};
4425: PetscInt i;
4426: /*
4427: Order of precedence:
4428: 0) See if newtype is a superclass of the current matrix.
4429: 1) See if a specialized converter is known to the current matrix.
4430: 2) See if a specialized converter is known to the desired matrix class.
4431: 3) See if a good general converter is registered for the desired class
4432: (as of 6/27/03 only MATMPIADJ falls into this category).
4433: 4) See if a good general converter is known for the current matrix.
4434: 5) Use a really basic converter.
4435: */
4437: /* 0) See if newtype is a superclass of the current matrix.
4438: i.e. mat is mpiaij and newtype is aij */
4439: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4440: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4441: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4442: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4443: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4444: if (flg) {
4445: if (reuse == MAT_INPLACE_MATRIX) {
4446: PetscCall(PetscInfo(mat, "Early return\n"));
4447: PetscFunctionReturn(PETSC_SUCCESS);
4448: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4449: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4450: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4451: PetscFunctionReturn(PETSC_SUCCESS);
4452: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4453: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4454: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4455: PetscFunctionReturn(PETSC_SUCCESS);
4456: }
4457: }
4458: }
4459: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4460: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4461: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4462: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4463: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4464: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4465: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4466: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4467: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4468: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4469: if (conv) goto foundconv;
4470: }
4472: /* 2) See if a specialized converter is known to the desired matrix class. */
4473: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4474: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4475: PetscCall(MatSetType(B, newtype));
4476: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4477: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4478: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4479: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4480: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4481: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4482: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4483: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4484: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4485: if (conv) {
4486: PetscCall(MatDestroy(&B));
4487: goto foundconv;
4488: }
4489: }
4491: /* 3) See if a good general converter is registered for the desired class */
4492: conv = B->ops->convertfrom;
4493: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4494: PetscCall(MatDestroy(&B));
4495: if (conv) goto foundconv;
4497: /* 4) See if a good general converter is known for the current matrix */
4498: if (mat->ops->convert) conv = mat->ops->convert;
4499: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4500: if (conv) goto foundconv;
4502: /* 5) Use a really basic converter. */
4503: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4504: conv = MatConvert_Basic;
4506: foundconv:
4507: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4508: PetscCall((*conv)(mat, newtype, reuse, M));
4509: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4510: /* the block sizes must be same if the mappings are copied over */
4511: (*M)->rmap->bs = mat->rmap->bs;
4512: (*M)->cmap->bs = mat->cmap->bs;
4513: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4514: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4515: (*M)->rmap->mapping = mat->rmap->mapping;
4516: (*M)->cmap->mapping = mat->cmap->mapping;
4517: }
4518: (*M)->stencil.dim = mat->stencil.dim;
4519: (*M)->stencil.noc = mat->stencil.noc;
4520: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4521: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4522: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4523: }
4524: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4525: }
4526: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4528: /* Copy Mat options */
4529: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4530: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4531: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4532: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4533: PetscFunctionReturn(PETSC_SUCCESS);
4534: }
4536: /*@
4537: MatFactorGetSolverType - Returns the name of the package providing the factorization routines
4539: Not Collective
4541: Input Parameter:
4542: . mat - the matrix, must be a factored matrix
4544: Output Parameter:
4545: . type - the string name of the package (do not free this string)
4547: Level: intermediate
4549: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4550: @*/
4551: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4552: {
4553: PetscErrorCode (*conv)(Mat, MatSolverType *);
4555: PetscFunctionBegin;
4558: PetscAssertPointer(type, 2);
4559: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4560: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4561: if (conv) PetscCall((*conv)(mat, type));
4562: else *type = MATSOLVERPETSC;
4563: PetscFunctionReturn(PETSC_SUCCESS);
4564: }
4566: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4567: struct _MatSolverTypeForSpecifcType {
4568: MatType mtype;
4569: /* no entry for MAT_FACTOR_NONE */
4570: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4571: MatSolverTypeForSpecifcType next;
4572: };
4574: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4575: struct _MatSolverTypeHolder {
4576: char *name;
4577: MatSolverTypeForSpecifcType handlers;
4578: MatSolverTypeHolder next;
4579: };
4581: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4583: /*@C
4584: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4586: Logically Collective, No Fortran Support
4588: Input Parameters:
4589: + package - name of the package, for example `petsc` or `superlu`
4590: . mtype - the matrix type that works with this package
4591: . ftype - the type of factorization supported by the package
4592: - createfactor - routine that will create the factored matrix ready to be used
4594: Level: developer
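   Example Usage:
   A sketch of how an external package might register an LU factorization for `MATSEQAIJ`; the package
   name and the creation routine below are hypothetical:
.vb
   extern PetscErrorCode MatGetFactor_SeqAIJ_MyPkg(Mat, MatFactorType, Mat *); /* hypothetical routine */

   PetscCall(MatSolverTypeRegister("mypkg", MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_SeqAIJ_MyPkg));
.ve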
4596: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4597: `MatGetFactor()`
4598: @*/
4599: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4600: {
4601: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4602: PetscBool flg;
4603: MatSolverTypeForSpecifcType inext, iprev = NULL;
4605: PetscFunctionBegin;
4606: PetscCall(MatInitializePackage());
4607: if (!next) {
4608: PetscCall(PetscNew(&MatSolverTypeHolders));
4609: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4610: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4611: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4612: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4613: PetscFunctionReturn(PETSC_SUCCESS);
4614: }
4615: while (next) {
4616: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4617: if (flg) {
4618: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4619: inext = next->handlers;
4620: while (inext) {
4621: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4622: if (flg) {
4623: inext->createfactor[(int)ftype - 1] = createfactor;
4624: PetscFunctionReturn(PETSC_SUCCESS);
4625: }
4626: iprev = inext;
4627: inext = inext->next;
4628: }
4629: PetscCall(PetscNew(&iprev->next));
4630: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4631: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4632: PetscFunctionReturn(PETSC_SUCCESS);
4633: }
4634: prev = next;
4635: next = next->next;
4636: }
4637: PetscCall(PetscNew(&prev->next));
4638: PetscCall(PetscStrallocpy(package, &prev->next->name));
4639: PetscCall(PetscNew(&prev->next->handlers));
4640: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4641: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4642: PetscFunctionReturn(PETSC_SUCCESS);
4643: }
4645: /*@C
4646: MatSolverTypeGet - Gets the function that creates the factor matrix if it exists
4648: Input Parameters:
4649: + type - name of the package, for example `petsc` or `superlu`, if this is 'NULL', then the first result that satisfies the other criteria is returned
4650: . ftype - the type of factorization supported by the type
4651: - mtype - the matrix type that works with this type
4653: Output Parameters:
4654: + foundtype - `PETSC_TRUE` if the type was registered
4655: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4656: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4658: Calling sequence of `createfactor`:
4659: + A - the matrix providing the factor matrix
4660: . ftype - the `MatFactorType` of the factor requested
4661: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4663: Level: developer
4665: Note:
4666: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4667: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4668: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4670: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4671: `MatInitializePackage()`
4672: @*/
4673: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
4674: {
4675: MatSolverTypeHolder next = MatSolverTypeHolders;
4676: PetscBool flg;
4677: MatSolverTypeForSpecifcType inext;
4679: PetscFunctionBegin;
4680: if (foundtype) *foundtype = PETSC_FALSE;
4681: if (foundmtype) *foundmtype = PETSC_FALSE;
4682: if (createfactor) *createfactor = NULL;
4684: if (type) {
4685: while (next) {
4686: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4687: if (flg) {
4688: if (foundtype) *foundtype = PETSC_TRUE;
4689: inext = next->handlers;
4690: while (inext) {
4691: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4692: if (flg) {
4693: if (foundmtype) *foundmtype = PETSC_TRUE;
4694: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4695: PetscFunctionReturn(PETSC_SUCCESS);
4696: }
4697: inext = inext->next;
4698: }
4699: }
4700: next = next->next;
4701: }
4702: } else {
4703: while (next) {
4704: inext = next->handlers;
4705: while (inext) {
4706: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4707: if (flg && inext->createfactor[(int)ftype - 1]) {
4708: if (foundtype) *foundtype = PETSC_TRUE;
4709: if (foundmtype) *foundmtype = PETSC_TRUE;
4710: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4711: PetscFunctionReturn(PETSC_SUCCESS);
4712: }
4713: inext = inext->next;
4714: }
4715: next = next->next;
4716: }
4717: /* try with base classes inext->mtype */
4718: next = MatSolverTypeHolders;
4719: while (next) {
4720: inext = next->handlers;
4721: while (inext) {
4722: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4723: if (flg && inext->createfactor[(int)ftype - 1]) {
4724: if (foundtype) *foundtype = PETSC_TRUE;
4725: if (foundmtype) *foundmtype = PETSC_TRUE;
4726: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4727: PetscFunctionReturn(PETSC_SUCCESS);
4728: }
4729: inext = inext->next;
4730: }
4731: next = next->next;
4732: }
4733: }
4734: PetscFunctionReturn(PETSC_SUCCESS);
4735: }
4737: PetscErrorCode MatSolverTypeDestroy(void)
4738: {
4739: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4740: MatSolverTypeForSpecifcType inext, iprev;
4742: PetscFunctionBegin;
4743: while (next) {
4744: PetscCall(PetscFree(next->name));
4745: inext = next->handlers;
4746: while (inext) {
4747: PetscCall(PetscFree(inext->mtype));
4748: iprev = inext;
4749: inext = inext->next;
4750: PetscCall(PetscFree(iprev));
4751: }
4752: prev = next;
4753: next = next->next;
4754: PetscCall(PetscFree(prev));
4755: }
4756: MatSolverTypeHolders = NULL;
4757: PetscFunctionReturn(PETSC_SUCCESS);
4758: }
4760: /*@
4761: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided to `MatLUFactorSymbolic()` or `MatCholeskyFactorSymbolic()`
4763: Logically Collective
4765: Input Parameter:
4766: . mat - the matrix
4768: Output Parameter:
4769: . flg - `PETSC_TRUE` if uses the ordering
4771: Level: developer
4773: Note:
4774: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4775: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4777: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4778: @*/
4779: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4780: {
4781: PetscFunctionBegin;
4782: *flg = mat->canuseordering;
4783: PetscFunctionReturn(PETSC_SUCCESS);
4784: }
4786: /*@
4787: MatFactorGetPreferredOrdering - Returns the preferred ordering for a particular matrix factor object
4789: Logically Collective
4791: Input Parameters:
4792: + mat - the matrix obtained with `MatGetFactor()`
4793: - ftype - the factorization type to be used
4795: Output Parameter:
4796: . otype - the preferred ordering type
4798: Level: developer
4800: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4801: @*/
4802: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4803: {
4804: PetscFunctionBegin;
4805: *otype = mat->preferredordering[ftype];
4806: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4807: PetscFunctionReturn(PETSC_SUCCESS);
4808: }
4810: /*@
4811: MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic,Numeric()
4813: Collective
4815: Input Parameters:
4816: + mat - the matrix
4817: . type - name of solver type, for example, `superlu`, `petsc` (to use PETSc's solver if it is available), if this is 'NULL', then the first result that satisfies
4818: the other criteria is returned
4819: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4821: Output Parameter:
4822: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4824: Options Database Keys:
4825: + -pc_factor_mat_solver_type <type> - choose the type at run time. When using `KSP` solvers
4826: . -pc_factor_mat_factor_on_host <bool> - do mat factorization on host (with device matrices). Default is doing it on device
4827: - -pc_factor_mat_solve_on_host <bool> - do mat solve on host (with device matrices). Default is doing it on device
4829: Level: intermediate
4831: Notes:
4832: The returned matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4833: types registered with `MatSolverTypeRegister()` cannot be fully tested except at runtime.
4835: Users usually access the factorization solvers via `KSP`
4837: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4838: such as pastix, superlu, mumps etc. PETSc must have been configured (with ./configure) to use the external solver, using the option --download-package or --with-package-dir
4840: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4841: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4842: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4844: Some of the packages have options for controlling the factorization; these are in the form -prefix_mat_packagename_packageoption
4845: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly, one can
4846: call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
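   Example Usage:
   A minimal sketch requesting a Cholesky factor and solving with it; it assumes `A` is an assembled,
   symmetric positive definite `MATSEQAIJ` matrix and `b`, `x` are compatible vectors (names are illustrative):
.vb
   Mat           F;
   IS            perm, iperm;
   MatFactorInfo info;

   PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F));
   PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &perm, &iperm));
   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatCholeskyFactorSymbolic(F, A, perm, &info));
   PetscCall(MatCholeskyFactorNumeric(F, A, &info));
   PetscCall(MatSolve(F, b, x));
   PetscCall(ISDestroy(&perm));
   PetscCall(ISDestroy(&iperm));
   PetscCall(MatDestroy(&F));
.ve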
4848: Developer Note:
4849: This should actually be called `MatCreateFactor()` since it creates a new factor object
4851: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4852: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`
4853: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4854: @*/
4855: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4856: {
4857: PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
4858: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4860: PetscFunctionBegin;
4864: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4865: MatCheckPreallocated(mat, 1);
4867: PetscCall(MatIsShell(mat, &shell));
4868: if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
4869: if (hasop) {
4870: PetscUseTypeMethod(mat, getfactor, type, ftype, f);
4871: PetscFunctionReturn(PETSC_SUCCESS);
4872: }
4874: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4875: if (!foundtype) {
4876: if (type) {
4877: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4878: ((PetscObject)mat)->type_name, type);
4879: } else {
4880: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4881: }
4882: }
4883: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4884: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4886: PetscCall((*conv)(mat, ftype, f));
4887: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4888: PetscFunctionReturn(PETSC_SUCCESS);
4889: }
4891: /*@
4892: MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver type and factor type
4894: Not Collective
4896: Input Parameters:
4897: + mat - the matrix
4898: . type - name of solver type, for example, `superlu`, `petsc` (to use PETSc's default)
4899: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4901: Output Parameter:
4902: . flg - `PETSC_TRUE` if the factorization is available
4904: Level: intermediate
4906: Notes:
4907: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4908: such as pastix, superlu, mumps etc.
4910: PETSc must have been configured (with ./configure) to use the external solver, using the option --download-package
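   Example Usage:
   A short sketch checking at run time whether a MUMPS LU factorization is available for `A` before
   requesting it; it assumes `A` is an assembled matrix (names are illustrative):
.vb
   PetscBool flg;
   Mat       F;

   PetscCall(MatGetFactorAvailable(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &flg));
   if (flg) PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F));
.ve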
4912: Developer Note:
4913: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4915: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4916: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4917: @*/
4918: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4919: {
4920: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4922: PetscFunctionBegin;
4924: PetscAssertPointer(flg, 4);
4926: *flg = PETSC_FALSE;
4927: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
4929: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4930: MatCheckPreallocated(mat, 1);
4932: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4933: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4934: PetscFunctionReturn(PETSC_SUCCESS);
4935: }
4937: /*@
4938: MatDuplicate - Duplicates a matrix including the non-zero structure.
4940: Collective
4942: Input Parameters:
4943: + mat - the matrix
4944: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4945: See the manual page for `MatDuplicateOption` for an explanation of these options.
4947: Output Parameter:
4948: . M - pointer to place new matrix
4950: Level: intermediate
4952: Notes:
4953: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
4955: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
4957: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4959: When the original `mat` is the product of a matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4960: is duplicated; the internal data structures created for the reuse of previous matrix operations are not duplicated.
4961: Users should not use `MatDuplicate()` to create a new matrix `M` if `M` is intended to be reused as the product of a matrix operation.
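   Example Usage:
   A minimal sketch; it assumes `A` is an assembled matrix (names are illustrative):
.vb
   Mat B;

   PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &B)); /* B has the same nonzero structure and values as A */
   PetscCall(MatScale(B, 2.0));                     /* modifying B does not change A */
.ve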
4963: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4964: @*/
4965: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4966: {
4967: Mat B;
4968: VecType vtype;
4969: PetscInt i;
4970: PetscObject dm, container_h, container_d;
4971: PetscErrorCodeFn *viewf;
4973: PetscFunctionBegin;
4976: PetscAssertPointer(M, 3);
4977: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4978: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4979: MatCheckPreallocated(mat, 1);
4981: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4982: PetscUseTypeMethod(mat, duplicate, op, M);
4983: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4984: B = *M;
4986: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4987: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4988: PetscCall(MatGetVecType(mat, &vtype));
4989: PetscCall(MatSetVecType(B, vtype));
4991: B->stencil.dim = mat->stencil.dim;
4992: B->stencil.noc = mat->stencil.noc;
4993: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4994: B->stencil.dims[i] = mat->stencil.dims[i];
4995: B->stencil.starts[i] = mat->stencil.starts[i];
4996: }
4998: B->nooffproczerorows = mat->nooffproczerorows;
4999: B->nooffprocentries = mat->nooffprocentries;
5001: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
5002: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
5003: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
5004: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
5005: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
5006: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
5007: if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
5008: PetscCall(PetscObjectStateIncrease((PetscObject)B));
5009: PetscFunctionReturn(PETSC_SUCCESS);
5010: }
5012: /*@
5013: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
5015: Logically Collective
5017: Input Parameter:
5018: . mat - the matrix
5020: Output Parameter:
5021: . v - the diagonal of the matrix
5023: Level: intermediate
5025: Note:
5026: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
5027: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
5028: is larger than `ndiag`, the values of the remaining entries are unspecified.
5030: Currently only correct in parallel for square matrices.
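   Example Usage:
   A minimal sketch using a vector with the row layout of the matrix; it assumes `A` is an assembled
   square matrix (names are illustrative):
.vb
   Vec diag;

   PetscCall(MatCreateVecs(A, NULL, &diag)); /* diag has the row layout of A */
   PetscCall(MatGetDiagonal(A, diag));
   PetscCall(VecDestroy(&diag));
.ve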
5032: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
5033: @*/
5034: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
5035: {
5036: PetscFunctionBegin;
5040: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5041: MatCheckPreallocated(mat, 1);
5042: if (PetscDefined(USE_DEBUG)) {
5043: PetscInt nv, row, col, ndiag;
5045: PetscCall(VecGetLocalSize(v, &nv));
5046: PetscCall(MatGetLocalSize(mat, &row, &col));
5047: ndiag = PetscMin(row, col);
5048: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5049: }
5051: PetscUseTypeMethod(mat, getdiagonal, v);
5052: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5053: PetscFunctionReturn(PETSC_SUCCESS);
5054: }
5056: /*@
5057: MatGetRowMin - Gets the minimum value (of the real part) of each
5058: row of the matrix
5060: Logically Collective
5062: Input Parameter:
5063: . mat - the matrix
5065: Output Parameters:
5066: + v - the vector for storing the minimums
5067: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5069: Level: intermediate
5071: Note:
5072: The results of this call are the same as if one converted the matrix to dense format
5073: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5075: This code is only implemented for a couple of matrix formats.
5077: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5078: `MatGetRowMax()`
5079: @*/
5080: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5081: {
5082: PetscFunctionBegin;
5086: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5088: if (!mat->cmap->N) {
5089: PetscCall(VecSet(v, PETSC_MAX_REAL));
5090: if (idx) {
5091: PetscInt i, m = mat->rmap->n;
5092: for (i = 0; i < m; i++) idx[i] = -1;
5093: }
5094: } else {
5095: MatCheckPreallocated(mat, 1);
5096: }
5097: PetscUseTypeMethod(mat, getrowmin, v, idx);
5098: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5099: PetscFunctionReturn(PETSC_SUCCESS);
5100: }
5102: /*@
5103: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5104: row of the matrix
5106: Logically Collective
5108: Input Parameter:
5109: . mat - the matrix
5111: Output Parameters:
5112: + v - the vector for storing the minimums
5113: - idx - the indices of the column found for each row (or `NULL` if not needed)
5115: Level: intermediate
5117: Notes:
5118: If a row is completely empty or has only 0.0 values, then the `idx` value for that
5119: row is 0 (the first column).
5121: This code is only implemented for a couple of matrix formats.
5123: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5124: @*/
5125: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5126: {
5127: PetscFunctionBegin;
5131: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5132: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5134: if (!mat->cmap->N) {
5135: PetscCall(VecSet(v, 0.0));
5136: if (idx) {
5137: PetscInt i, m = mat->rmap->n;
5138: for (i = 0; i < m; i++) idx[i] = -1;
5139: }
5140: } else {
5141: MatCheckPreallocated(mat, 1);
5142: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5143: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5144: }
5145: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5146: PetscFunctionReturn(PETSC_SUCCESS);
5147: }
5149: /*@
5150: MatGetRowMax - Gets the maximum value (of the real part) of each
5151: row of the matrix
5153: Logically Collective
5155: Input Parameter:
5156: . mat - the matrix
5158: Output Parameters:
5159: + v - the vector for storing the maximums
5160: - idx - the indices of the column found for each row (optional, otherwise pass `NULL`)
5162: Level: intermediate
5164: Notes:
5165: The results of this call are the same as if one converted the matrix to dense format
5166: and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
5168: This code is only implemented for a couple of matrix formats.
5170: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5171: @*/
5172: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5173: {
5174: PetscFunctionBegin;
5178: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5180: if (!mat->cmap->N) {
5181: PetscCall(VecSet(v, PETSC_MIN_REAL));
5182: if (idx) {
5183: PetscInt i, m = mat->rmap->n;
5184: for (i = 0; i < m; i++) idx[i] = -1;
5185: }
5186: } else {
5187: MatCheckPreallocated(mat, 1);
5188: PetscUseTypeMethod(mat, getrowmax, v, idx);
5189: }
5190: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5191: PetscFunctionReturn(PETSC_SUCCESS);
5192: }
5194: /*@
5195: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5196: row of the matrix
5198: Logically Collective
5200: Input Parameter:
5201: . mat - the matrix
5203: Output Parameters:
5204: + v - the vector for storing the maximums
5205: - idx - the indices of the column found for each row (or `NULL` if not needed)
5207: Level: intermediate
5209: Notes:
5210: If a row is completely empty or has only 0.0 values, then the `idx` value for that
5211: row is 0 (the first column).
5213: This code is only implemented for a couple of matrix formats.
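   Example Usage:
   A short sketch gathering the largest magnitude in each local row and the column where it occurs;
   it assumes `A` is an assembled matrix (names are illustrative):
.vb
   Vec       rowmax;
   PetscInt *cols, m;

   PetscCall(MatGetLocalSize(A, &m, NULL));
   PetscCall(MatCreateVecs(A, NULL, &rowmax)); /* rowmax has the row layout of A */
   PetscCall(PetscMalloc1(m, &cols));
   PetscCall(MatGetRowMaxAbs(A, rowmax, cols));
   PetscCall(PetscFree(cols));
   PetscCall(VecDestroy(&rowmax));
.ve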
5215: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5216: @*/
5217: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5218: {
5219: PetscFunctionBegin;
5223: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5225: if (!mat->cmap->N) {
5226: PetscCall(VecSet(v, 0.0));
5227: if (idx) {
5228: PetscInt i, m = mat->rmap->n;
5229: for (i = 0; i < m; i++) idx[i] = -1;
5230: }
5231: } else {
5232: MatCheckPreallocated(mat, 1);
5233: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5234: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5235: }
5236: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5237: PetscFunctionReturn(PETSC_SUCCESS);
5238: }
5240: /*@
5241: MatGetRowSumAbs - Gets the sum of the absolute values of the entries in each row of the matrix
5243: Logically Collective
5245: Input Parameter:
5246: . mat - the matrix
5248: Output Parameter:
5249: . v - the vector for storing the sum
5251: Level: intermediate
5253: This code is only implemented for a couple of matrix formats.
5255: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5256: @*/
5257: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5258: {
5259: PetscFunctionBegin;
5263: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5265: if (!mat->cmap->N) {
5266: PetscCall(VecSet(v, 0.0));
5267: } else {
5268: MatCheckPreallocated(mat, 1);
5269: PetscUseTypeMethod(mat, getrowsumabs, v);
5270: }
5271: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5272: PetscFunctionReturn(PETSC_SUCCESS);
5273: }
5275: /*@
5276: MatGetRowSum - Gets the sum of each row of the matrix
5278: Logically or Neighborhood Collective
5280: Input Parameter:
5281: . mat - the matrix
5283: Output Parameter:
5284: . v - the vector for storing the sum of rows
5286: Level: intermediate
5288: Note:
5289: This code is slow since it is not currently specialized for different formats
5291: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5292: @*/
5293: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5294: {
5295: Vec ones;
5297: PetscFunctionBegin;
5301: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5302: MatCheckPreallocated(mat, 1);
5303: PetscCall(MatCreateVecs(mat, &ones, NULL));
5304: PetscCall(VecSet(ones, 1.));
5305: PetscCall(MatMult(mat, ones, v));
5306: PetscCall(VecDestroy(&ones));
5307: PetscFunctionReturn(PETSC_SUCCESS);
5308: }
5310: /*@
5311: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5312: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5314: Collective
5316: Input Parameter:
5317: . mat - the matrix to provide the transpose
5319: Output Parameter:
5320: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5322: Level: advanced
5324: Note:
5325: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5326: routine allows bypassing that call.
5328: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5329: @*/
5330: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5331: {
5332: MatParentState *rb = NULL;
5334: PetscFunctionBegin;
5335: PetscCall(PetscNew(&rb));
5336: rb->id = ((PetscObject)mat)->id;
5337: rb->state = 0;
5338: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5339: PetscCall(PetscObjectContainerCompose((PetscObject)B, "MatTransposeParent", rb, PetscCtxDestroyDefault));
5340: PetscFunctionReturn(PETSC_SUCCESS);
5341: }
5343: /*@
5344: MatTranspose - Computes the transpose of a matrix, either in-place or out-of-place.
5346: Collective
5348: Input Parameters:
5349: + mat - the matrix to transpose
5350: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5352: Output Parameter:
5353: . B - the transpose of the matrix
5355: Level: intermediate
5357: Notes:
5358: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5360: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX` to store the transpose. If you already have a matrix to contain the
5361: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5363: If the nonzero structure of `mat` changed from the previous call to this function with the same matrices, an error will be generated for some matrix types.
5365: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose but don't need the storage to be changed.
5366: For example, the matrix returned by `MatCreateTranspose()` applies the transpose of the given matrix to a vector in matrix-vector products computed with `MatMult()`.
5368: If `mat` is unchanged from the last call this function returns immediately without recomputing the result
5370: If you only need the symbolic transpose of a matrix, and not the numerical values, use `MatTransposeSymbolic()`
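   Example Usage:
   A minimal sketch computing a transpose once and refreshing it after the values of `A` change; it
   assumes the nonzero structure of `A` stays the same between the two calls (names are illustrative):
.vb
   Mat At;

   PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &At));
   /* ... modify the numerical values of A, keeping its nonzero structure ... */
   PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &At)); /* reuses the storage of At */
.ve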
5372: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5373: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5374: @*/
5375: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5376: {
5377: PetscContainer rB = NULL;
5378: MatParentState *rb = NULL;
5380: PetscFunctionBegin;
5383: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5384: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5385: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5386: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5387: MatCheckPreallocated(mat, 1);
5388: if (reuse == MAT_REUSE_MATRIX) {
5389: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5390: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5391: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5392: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5393: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5394: }
5396: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5397: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5398: PetscUseTypeMethod(mat, transpose, reuse, B);
5399: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5400: }
5401: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5403: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5404: if (reuse != MAT_INPLACE_MATRIX) {
5405: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5406: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5407: rb->state = ((PetscObject)mat)->state;
5408: rb->nonzerostate = mat->nonzerostate;
5409: }
5410: PetscFunctionReturn(PETSC_SUCCESS);
5411: }
5413: /*@
5414: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5416: Collective
5418: Input Parameter:
5419: . A - the matrix to transpose
5421: Output Parameter:
5422: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5423: numerical portion.
5425: Level: intermediate
5427: Note:
5428: This is not supported for many matrix types, use `MatTranspose()` in those cases
5430: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5431: @*/
5432: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5433: {
5434: PetscFunctionBegin;
5437: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5438: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5439: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5440: PetscUseTypeMethod(A, transposesymbolic, B);
5441: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5443: PetscCall(MatTransposeSetPrecursor(A, *B));
5444: PetscFunctionReturn(PETSC_SUCCESS);
5445: }
5447: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5448: {
5449: PetscContainer rB;
5450: MatParentState *rb;
5452: PetscFunctionBegin;
5455: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5456: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5457: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5458: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5459: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5460: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5461: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5462: PetscFunctionReturn(PETSC_SUCCESS);
5463: }
5465: /*@
5466: MatIsTranspose - Test whether a matrix is another one's transpose,
5467: or its own, in which case it tests symmetry.
5469: Collective
5471: Input Parameters:
5472: + A - the matrix to test
5473: . B - the matrix to test against, this can equal the first parameter
5474: - tol - tolerance, differences between entries smaller than this are counted as zero
5476: Output Parameter:
5477: . flg - the result
5479: Level: intermediate
5481: Notes:
5482: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5483: test involves parallel copies of the block off-diagonal parts of the matrix.
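   Example Usage:
   A short sketch testing whether `A` is numerically symmetric by comparing it with itself; it assumes
   `A` is an assembled `MATAIJ` matrix (the tolerance is illustrative):
.vb
   PetscBool flg;

   PetscCall(MatIsTranspose(A, A, 1.e-12, &flg));
   if (flg) PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));
.ve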
5485: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5486: @*/
5487: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5488: {
5489: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5491: PetscFunctionBegin;
5494: PetscAssertPointer(flg, 4);
5495: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5496: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5497: *flg = PETSC_FALSE;
5498: if (f && g) {
5499: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5500: PetscCall((*f)(A, B, tol, flg));
5501: } else {
5502: MatType mattype;
5504: PetscCall(MatGetType(f ? B : A, &mattype));
5505: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5506: }
5507: PetscFunctionReturn(PETSC_SUCCESS);
5508: }
5510: /*@
5511: MatHermitianTranspose - Computes the in-place or out-of-place Hermitian (conjugate) transpose of a matrix.
5513: Collective
5515: Input Parameters:
5516: + mat - the matrix to transpose and complex conjugate
5517: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5519: Output Parameter:
5520: . B - the Hermitian transpose
5522: Level: intermediate
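  Example usage, a sketch; `mat` is assumed to be assembled:
.vb
  Mat C;

  PetscCall(MatHermitianTranspose(mat, MAT_INITIAL_MATRIX, &C));   // C is the conjugate transpose of mat
  // ... use C ...
  PetscCall(MatDestroy(&C));
.ve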
5524: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5525: @*/
5526: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5527: {
5528: PetscFunctionBegin;
5529: PetscCall(MatTranspose(mat, reuse, B));
5530: #if defined(PETSC_USE_COMPLEX)
5531: PetscCall(MatConjugate(*B));
5532: #endif
5533: PetscFunctionReturn(PETSC_SUCCESS);
5534: }
5536: /*@
5537: MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose, or its own, in which case it tests whether the matrix is Hermitian.
5539: Collective
5541: Input Parameters:
5542: + A - the matrix to test
5543: . B - the matrix to test against, this can equal the first parameter
5544: - tol - tolerance, differences between entries smaller than this are counted as zero
5546: Output Parameter:
5547: . flg - the result
5549: Level: intermediate
5551: Notes:
5552: Only available for `MATAIJ` matrices.
5554: The sequential algorithm
5555: has a running time of the order of the number of nonzeros; the parallel
5556: test involves parallel copies of the block off-diagonal parts of the matrix.
5558: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5559: @*/
5560: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5561: {
5562: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5564: PetscFunctionBegin;
5567: PetscAssertPointer(flg, 4);
5568: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5569: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5570: if (f && g) {
5571: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5572: PetscCall((*f)(A, B, tol, flg));
5573: }
5574: PetscFunctionReturn(PETSC_SUCCESS);
5575: }
5577: /*@
5578: MatPermute - Creates a new matrix with rows and columns permuted from the
5579: original.
5581: Collective
5583: Input Parameters:
5584: + mat - the matrix to permute
5585: . row - row permutation, each processor supplies only the permutation for its rows
5586: - col - column permutation, each processor supplies only the permutation for its columns
5588: Output Parameter:
5589: . B - the permuted matrix
5591: Level: advanced
5593: Note:
5594: The index sets map from row/col of permuted matrix to row/col of original matrix.
5595: The index sets should be on the same communicator as mat and have the same local sizes.
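  Example usage, a sketch; `mat` is assumed to be an assembled square matrix and the RCM ordering is just one possible way to obtain the permutations:
.vb
  IS  rowperm, colperm;
  Mat B;

  PetscCall(MatGetOrdering(mat, MATORDERINGRCM, &rowperm, &colperm));
  PetscCall(MatPermute(mat, rowperm, colperm, &B));
  // ... use the permuted matrix B ...
  PetscCall(MatDestroy(&B));
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));
.ve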
5597: Developer Note:
5598: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5599: exploit the fact that row and col are permutations, consider implementing the
5600: more general `MatCreateSubMatrix()` instead.
5602: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5603: @*/
5604: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5605: {
5606: PetscFunctionBegin;
5611: PetscAssertPointer(B, 4);
5612: PetscCheckSameComm(mat, 1, row, 2);
5613: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5614: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5615: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5616: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5617: MatCheckPreallocated(mat, 1);
5619: if (mat->ops->permute) {
5620: PetscUseTypeMethod(mat, permute, row, col, B);
5621: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5622: } else {
5623: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5624: }
5625: PetscFunctionReturn(PETSC_SUCCESS);
5626: }
5628: /*@
5629: MatEqual - Compares two matrices.
5631: Collective
5633: Input Parameters:
5634: + A - the first matrix
5635: - B - the second matrix
5637: Output Parameter:
5638: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5640: Level: intermediate
5642: Note:
5643: If either matrix is "matrix-free", meaning its entries are not stored explicitly, then equality is determined by comparing
5644: the results of several matrix-vector products using randomly created vectors, see `MatMultEqual()`.
5646: .seealso: [](ch_matrices), `Mat`, `MatMultEqual()`
5647: @*/
5648: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5649: {
5650: PetscFunctionBegin;
5655: PetscAssertPointer(flg, 3);
5656: PetscCheckSameComm(A, 1, B, 2);
5657: MatCheckPreallocated(A, 1);
5658: MatCheckPreallocated(B, 2);
5659: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5660: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5661: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5662: B->cmap->N);
5663: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5664: PetscUseTypeMethod(A, equal, B, flg);
5665: } else {
5666: PetscCall(MatMultEqual(A, B, 10, flg));
5667: }
5668: PetscFunctionReturn(PETSC_SUCCESS);
5669: }
5671: /*@
5672: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5673: matrices that are stored as vectors. Either of the two scaling
5674: matrices can be `NULL`.
5676: Collective
5678: Input Parameters:
5679: + mat - the matrix to be scaled
5680: . l - the left scaling vector (or `NULL`)
5681: - r - the right scaling vector (or `NULL`)
5683: Level: intermediate
5685: Note:
5686: `MatDiagonalScale()` computes $A = LAR$, where
5687: $L$ is a diagonal matrix (stored as the vector `l`) that scales the rows of the matrix and
5688: $R$ is a diagonal matrix (stored as the vector `r`) that scales the columns of the matrix.
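  For example, symmetric scaling of a square matrix by the inverse square roots of its diagonal might look like the following sketch; it assumes all diagonal entries of `mat` are nonzero:
.vb
  Vec d;

  PetscCall(MatCreateVecs(mat, NULL, &d));   // vector with the row layout of mat
  PetscCall(MatGetDiagonal(mat, d));
  PetscCall(VecSqrtAbs(d));
  PetscCall(VecReciprocal(d));
  PetscCall(MatDiagonalScale(mat, d, d));    // mat <- D^{-1/2} mat D^{-1/2}
  PetscCall(VecDestroy(&d));
.ve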
5690: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5691: @*/
5692: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5693: {
5694: PetscFunctionBegin;
5697: if (l) {
5699: PetscCheckSameComm(mat, 1, l, 2);
5700: }
5701: if (r) {
5703: PetscCheckSameComm(mat, 1, r, 3);
5704: }
5705: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5706: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5707: MatCheckPreallocated(mat, 1);
5708: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5710: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5711: PetscUseTypeMethod(mat, diagonalscale, l, r);
5712: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5713: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5714: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5715: PetscFunctionReturn(PETSC_SUCCESS);
5716: }
5718: /*@
5719: MatScale - Scales all elements of a matrix by a given number.
5721: Logically Collective
5723: Input Parameters:
5724: + mat - the matrix to be scaled
5725: - a - the scaling value
5727: Level: intermediate
5729: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5730: @*/
5731: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5732: {
5733: PetscFunctionBegin;
5736: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5737: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5739: MatCheckPreallocated(mat, 1);
5741: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5742: if (a != (PetscScalar)1.0) {
5743: PetscUseTypeMethod(mat, scale, a);
5744: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5745: }
5746: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5747: PetscFunctionReturn(PETSC_SUCCESS);
5748: }
5750: /*@
5751: MatNorm - Calculates various norms of a matrix.
5753: Collective
5755: Input Parameters:
5756: + mat - the matrix
5757: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5759: Output Parameter:
5760: . nrm - the resulting norm
5762: Level: intermediate
5764: .seealso: [](ch_matrices), `Mat`
5765: @*/
5766: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5767: {
5768: PetscFunctionBegin;
5771: PetscAssertPointer(nrm, 3);
5773: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5774: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5775: MatCheckPreallocated(mat, 1);
5777: PetscUseTypeMethod(mat, norm, type, nrm);
5778: PetscFunctionReturn(PETSC_SUCCESS);
5779: }
5781: /*
5782: This variable is used to prevent counting of MatAssemblyBegin() that
5783: are called from within a MatAssemblyEnd().
5784: */
5785: static PetscInt MatAssemblyEnd_InUse = 0;
5786: /*@
5787: MatAssemblyBegin - Begins assembling the matrix. This routine should
5788: be called after completing all calls to `MatSetValues()`.
5790: Collective
5792: Input Parameters:
5793: + mat - the matrix
5794: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5796: Level: beginner
5798: Notes:
5799: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5800: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5802: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5803: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5804: using the matrix.
5806: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5807: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5808: a global collective operation requiring all processes that share the matrix.
5810: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine is compressed
5811: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5812: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
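  A typical final assembly looks like the following sketch, where `rstart`, `rend`, `ncols`, `cols`, and `vals` are placeholders for application data:
.vb
  for (PetscInt i = rstart; i < rend; i++) {
    // compute the column indices cols[] and values vals[] of row i
    PetscCall(MatSetValues(mat, 1, &i, ncols, cols, vals, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  // computation that does not use the matrix may be overlapped with the communication here
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve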
5814: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5815: @*/
5816: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5817: {
5818: PetscFunctionBegin;
5821: MatCheckPreallocated(mat, 1);
5822: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5823: if (mat->assembled) {
5824: mat->was_assembled = PETSC_TRUE;
5825: mat->assembled = PETSC_FALSE;
5826: }
5828: if (!MatAssemblyEnd_InUse) {
5829: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5830: PetscTryTypeMethod(mat, assemblybegin, type);
5831: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5832: } else PetscTryTypeMethod(mat, assemblybegin, type);
5833: PetscFunctionReturn(PETSC_SUCCESS);
5834: }
5836: /*@
5837: MatAssembled - Indicates if a matrix has been assembled and is ready for
5838: use; for example, in a matrix-vector product.
5840: Not Collective
5842: Input Parameter:
5843: . mat - the matrix
5845: Output Parameter:
5846: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5848: Level: advanced
5850: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5851: @*/
5852: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5853: {
5854: PetscFunctionBegin;
5856: PetscAssertPointer(assembled, 2);
5857: *assembled = mat->assembled;
5858: PetscFunctionReturn(PETSC_SUCCESS);
5859: }
5861: /*@
5862: MatAssemblyEnd - Completes assembling the matrix. This routine should
5863: be called after `MatAssemblyBegin()`.
5865: Collective
5867: Input Parameters:
5868: + mat - the matrix
5869: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5871: Options Database Keys:
5872: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5873: . -mat_view ::ascii_info_detail - Prints more detailed info
5874: . -mat_view - Prints matrix in ASCII format
5875: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5876: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5877: . -display <name> - Sets display name (default is host)
5878: . -draw_pause <sec> - Sets number of seconds to pause after display
5879: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5880: . -viewer_socket_machine <machine> - Machine to use for socket
5881: . -viewer_socket_port <port> - Port number to use for socket
5882: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5884: Level: beginner
5886: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5887: @*/
5888: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5889: {
5890: static PetscInt inassm = 0;
5891: PetscBool flg = PETSC_FALSE;
5893: PetscFunctionBegin;
5897: inassm++;
5898: MatAssemblyEnd_InUse++;
5899: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5900: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5901: PetscTryTypeMethod(mat, assemblyend, type);
5902: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5903: } else PetscTryTypeMethod(mat, assemblyend, type);
5905: /* Flush assembly is not a true assembly */
5906: if (type != MAT_FLUSH_ASSEMBLY) {
5907: if (mat->num_ass) {
5908: if (!mat->symmetry_eternal) {
5909: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5910: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5911: }
5912: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5913: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5914: }
5915: mat->num_ass++;
5916: mat->assembled = PETSC_TRUE;
5917: mat->ass_nonzerostate = mat->nonzerostate;
5918: }
5920: mat->insertmode = NOT_SET_VALUES;
5921: MatAssemblyEnd_InUse--;
5922: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5923: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5924: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5926: if (mat->checksymmetryonassembly) {
5927: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5928: if (flg) {
5929: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5930: } else {
5931: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5932: }
5933: }
5934: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5935: }
5936: inassm--;
5937: PetscFunctionReturn(PETSC_SUCCESS);
5938: }
5940: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5941: /*@
5942: MatSetOption - Sets a parameter option for a matrix. Some options
5943: may be specific to certain storage formats. Some options
5944: determine how values will be inserted (or added). Sorted,
5945: row-oriented input will generally assemble the fastest. The default
5946: is row-oriented.
5948: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5950: Input Parameters:
5951: + mat - the matrix
5952: . op - the option, one of those listed below (and possibly others),
5953: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5955: Options Describing Matrix Structure:
5956: + `MAT_SPD` - symmetric positive definite
5957: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5958: . `MAT_HERMITIAN` - transpose is the complex conjugation
5959: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5960: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5961: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5962: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5964: These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that these properties
5965: do not need to be computed (usually at a high cost).
5967: Options For Use with `MatSetValues()`:
5968: Insert a logically dense subblock, which can be
5969: . `MAT_ROW_ORIENTED` - row-oriented (default)
5971: These options reflect the data you pass in with `MatSetValues()`; they have
5972: nothing to do with how the data is stored internally in the matrix
5973: data structure.
5975: When (re)assembling a matrix, we can restrict the input for
5976: efficiency/debugging purposes. These options include
5977: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5978: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5979: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5980: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5981: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5982: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5983: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5984: performance for very large process counts.
5985: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5986: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5987: functions, instead sending only neighbor messages.
5989: Level: intermediate
5991: Notes:
5992: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
5994: Some options are relevant only for particular matrix types and
5995: are thus ignored by others. Other options are not supported by
5996: certain matrix types and will generate an error message if set.
5998: If using Fortran to compute a matrix, one may need to
5999: use the column-oriented option (or convert to the row-oriented
6000: format).
6002: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
6003: that would generate a new entry in the nonzero structure is instead
6004: ignored. Thus, if memory has not already been allocated for this particular
6005: data, then the insertion is ignored. For dense matrices, in which
6006: the entire array is allocated, no entries are ever ignored.
6007: Set this option after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one less global reduction.
6009: `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6010: that would generate a new entry in the nonzero structure instead produces
6011: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one less global reduction.
6013: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6014: that would generate a new entry that has not been preallocated will
6015: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
6016: only.) This is a useful flag when debugging matrix memory preallocation.
6017: If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one less global reduction.
6019: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
6020: other processors should be dropped, rather than stashed.
6021: This is useful if you know that the "owning" processor is also
6022: always generating the correct matrix entries, so that PETSc need
6023: not transfer duplicate entries generated on another processor.
6025: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6026: searches during matrix assembly. When this flag is set, the hash table
6027: is created during the first matrix assembly. This hash table is
6028: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6029: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
6030: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
6031: supported by `MATMPIBAIJ` format only.
6033: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6034: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6036: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6037: a zero location in the matrix
6039: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6041: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6042: zero row routines and thus improves performance for very large process counts.
6044: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6045: part of the matrix (since they should match the upper triangular part).
6047: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6048: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6049: with finite difference schemes with non-periodic boundary conditions.
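  For example, a user who knows that the matrix being built is symmetric positive definite, and will remain so through later changes to its values, might call the following sketch:
.vb
  PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
  PetscCall(MatSetOption(mat, MAT_SPD_ETERNAL, PETSC_TRUE));
.ve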
6051: Developer Note:
6052: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6053: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6054: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6055: not changed.
6057: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6058: @*/
6059: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
6060: {
6061: PetscFunctionBegin;
6063: if (op > 0) {
6066: }
6068: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6070: switch (op) {
6071: case MAT_FORCE_DIAGONAL_ENTRIES:
6072: mat->force_diagonals = flg;
6073: PetscFunctionReturn(PETSC_SUCCESS);
6074: case MAT_NO_OFF_PROC_ENTRIES:
6075: mat->nooffprocentries = flg;
6076: PetscFunctionReturn(PETSC_SUCCESS);
6077: case MAT_SUBSET_OFF_PROC_ENTRIES:
6078: mat->assembly_subset = flg;
6079: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
6080: #if !defined(PETSC_HAVE_MPIUNI)
6081: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
6082: #endif
6083: mat->stash.first_assembly_done = PETSC_FALSE;
6084: }
6085: PetscFunctionReturn(PETSC_SUCCESS);
6086: case MAT_NO_OFF_PROC_ZERO_ROWS:
6087: mat->nooffproczerorows = flg;
6088: PetscFunctionReturn(PETSC_SUCCESS);
6089: case MAT_SPD:
6090: if (flg) {
6091: mat->spd = PETSC_BOOL3_TRUE;
6092: mat->symmetric = PETSC_BOOL3_TRUE;
6093: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6094: } else {
6095: mat->spd = PETSC_BOOL3_FALSE;
6096: }
6097: break;
6098: case MAT_SYMMETRIC:
6099: mat->symmetric = PetscBoolToBool3(flg);
6100: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6101: #if !defined(PETSC_USE_COMPLEX)
6102: mat->hermitian = PetscBoolToBool3(flg);
6103: #endif
6104: break;
6105: case MAT_HERMITIAN:
6106: mat->hermitian = PetscBoolToBool3(flg);
6107: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6108: #if !defined(PETSC_USE_COMPLEX)
6109: mat->symmetric = PetscBoolToBool3(flg);
6110: #endif
6111: break;
6112: case MAT_STRUCTURALLY_SYMMETRIC:
6113: mat->structurally_symmetric = PetscBoolToBool3(flg);
6114: break;
6115: case MAT_SYMMETRY_ETERNAL:
6116: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
6117: mat->symmetry_eternal = flg;
6118: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
6119: break;
6120: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6121: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
6122: mat->structural_symmetry_eternal = flg;
6123: break;
6124: case MAT_SPD_ETERNAL:
6125: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
6126: mat->spd_eternal = flg;
6127: if (flg) {
6128: mat->structural_symmetry_eternal = PETSC_TRUE;
6129: mat->symmetry_eternal = PETSC_TRUE;
6130: }
6131: break;
6132: case MAT_STRUCTURE_ONLY:
6133: mat->structure_only = flg;
6134: break;
6135: case MAT_SORTED_FULL:
6136: mat->sortedfull = flg;
6137: break;
6138: default:
6139: break;
6140: }
6141: PetscTryTypeMethod(mat, setoption, op, flg);
6142: PetscFunctionReturn(PETSC_SUCCESS);
6143: }
6145: /*@
6146: MatGetOption - Gets a parameter option that has been set for a matrix.
6148: Logically Collective
6150: Input Parameters:
6151: + mat - the matrix
6152: - op - the option, this only responds to certain options, check the code for which ones
6154: Output Parameter:
6155: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6157: Level: intermediate
6159: Notes:
6160: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6162: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6163: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6165: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6166: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6167: @*/
6168: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6169: {
6170: PetscFunctionBegin;
6174: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6175: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6177: switch (op) {
6178: case MAT_NO_OFF_PROC_ENTRIES:
6179: *flg = mat->nooffprocentries;
6180: break;
6181: case MAT_NO_OFF_PROC_ZERO_ROWS:
6182: *flg = mat->nooffproczerorows;
6183: break;
6184: case MAT_SYMMETRIC:
6185: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6186: break;
6187: case MAT_HERMITIAN:
6188: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6189: break;
6190: case MAT_STRUCTURALLY_SYMMETRIC:
6191: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6192: break;
6193: case MAT_SPD:
6194: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6195: break;
6196: case MAT_SYMMETRY_ETERNAL:
6197: *flg = mat->symmetry_eternal;
6198: break;
6199: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6200: *flg = mat->structural_symmetry_eternal;
6201: break;
6202: default:
6203: break;
6204: }
6205: PetscFunctionReturn(PETSC_SUCCESS);
6206: }
6208: /*@
6209: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6210: this routine retains the old nonzero structure.
6212: Logically Collective
6214: Input Parameter:
6215: . mat - the matrix
6217: Level: intermediate
6219: Note:
6220: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6221: See the Performance chapter of the users manual for information on preallocating matrices.
6223: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6224: @*/
6225: PetscErrorCode MatZeroEntries(Mat mat)
6226: {
6227: PetscFunctionBegin;
6230: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6231: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6232: MatCheckPreallocated(mat, 1);
6234: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6235: PetscUseTypeMethod(mat, zeroentries);
6236: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6237: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6238: PetscFunctionReturn(PETSC_SUCCESS);
6239: }
6241: /*@
6242: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6243: of a set of rows and columns of a matrix.
6245: Collective
6247: Input Parameters:
6248: + mat - the matrix
6249: . numRows - the number of rows/columns to zero
6250: . rows - the global row indices
6251: . diag - value put in the diagonal of the eliminated rows
6252: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6253: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6255: Level: intermediate
6257: Notes:
6258: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6260: For each zeroed row, the value of the corresponding `b` is set to diag times the value of the corresponding `x`.
6261: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated.
6263: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6264: Krylov method to take advantage of the known solution on the zeroed rows.
6266: For the parallel case, all processes that share the matrix (i.e.,
6267: those in the communicator used for matrix creation) MUST call this
6268: routine, regardless of whether any rows being zeroed are owned by
6269: them.
6271: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`; it merely zeros those entries in the matrix, but never
6272: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6273: missing.
6275: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6276: list only rows local to itself).
6278: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
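  For example, a sketch in which `nbc` and `bcrows` are placeholders for the number of constrained rows and their global indices, `x` holds the known boundary values, and `b` is the right-hand side:
.vb
  // x already contains the known boundary values in the entries listed in bcrows;
  // those entries of b are set to 1.0 * x and the remaining entries of b are
  // adjusted for the eliminated columns
  PetscCall(MatZeroRowsColumns(A, nbc, bcrows, 1.0, x, b));
.ve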
6280: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6281: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6282: @*/
6283: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6284: {
6285: PetscFunctionBegin;
6288: if (numRows) PetscAssertPointer(rows, 3);
6289: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6290: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6291: MatCheckPreallocated(mat, 1);
6293: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6294: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6295: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6296: PetscFunctionReturn(PETSC_SUCCESS);
6297: }
6299: /*@
6300: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6301: of a set of rows and columns of a matrix.
6303: Collective
6305: Input Parameters:
6306: + mat - the matrix
6307: . is - the rows to zero
6308: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6309: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6310: - b - optional vector of right-hand side, that will be adjusted by provided solution
6312: Level: intermediate
6314: Note:
6315: See `MatZeroRowsColumns()` for details on how this routine operates.
6317: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6318: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6319: @*/
6320: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6321: {
6322: PetscInt numRows;
6323: const PetscInt *rows;
6325: PetscFunctionBegin;
6330: PetscCall(ISGetLocalSize(is, &numRows));
6331: PetscCall(ISGetIndices(is, &rows));
6332: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6333: PetscCall(ISRestoreIndices(is, &rows));
6334: PetscFunctionReturn(PETSC_SUCCESS);
6335: }
6337: /*@
6338: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6339: of a set of rows of a matrix.
6341: Collective
6343: Input Parameters:
6344: + mat - the matrix
6345: . numRows - the number of rows to zero
6346: . rows - the global row indices
6347: . diag - value put in the diagonal of the zeroed rows
6348: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6349: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6351: Level: intermediate
6353: Notes:
6354: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6356: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6358: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6359: Krylov method to take advantage of the known solution on the zeroed rows.
6361: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns
6362: from the matrix).
6364: Unlike `MatZeroRowsColumns()`, for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure from the eliminated rows of the matrix
6365: but does not release memory. Because of this removal, matrix-vector products with the adjusted matrix will be a bit faster. For the dense
6366: formats this does not alter the nonzero structure.
6368: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) has been set, the nonzero structure
6369: of the matrix is not changed; the values are
6370: merely zeroed.
6372: The user can set a value in the diagonal entry (or for the `MATAIJ`
6373: formats can optionally remove the main diagonal entry from the
6374: nonzero structure as well, by passing 0.0 as the `diag` argument).
6376: For the parallel case, all processes that share the matrix (i.e.,
6377: those in the communicator used for matrix creation) MUST call this
6378: routine, regardless of whether any rows being zeroed are owned by
6379: them.
6381: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6382: list only rows local to itself).
6384: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6385: owns that are to be zeroed. This saves a global synchronization in the implementation.
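  For example, eliminating three Dirichlet rows might look like the following sketch; the indices are placeholders, `x` holds the known boundary values in those rows, and `b` is the right-hand side:
.vb
  PetscInt bcrows[] = {0, 5, 7};   // global indices of the Dirichlet rows

  PetscCall(MatZeroRows(A, 3, bcrows, 1.0, x, b));   // put 1.0 on the diagonal of each zeroed row
.ve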
6387: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6388: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6389: @*/
6390: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6391: {
6392: PetscFunctionBegin;
6395: if (numRows) PetscAssertPointer(rows, 3);
6396: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6397: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6398: MatCheckPreallocated(mat, 1);
6400: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6401: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6402: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6403: PetscFunctionReturn(PETSC_SUCCESS);
6404: }
6406: /*@
6407: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6408: of a set of rows of a matrix indicated by an `IS`
6410: Collective
6412: Input Parameters:
6413: + mat - the matrix
6414: . is - index set, `IS`, of rows to remove (if `NULL` then no row is removed)
6415: . diag - value put in all diagonals of eliminated rows
6416: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6417: - b - optional vector of right-hand side, that will be adjusted by provided solution
6419: Level: intermediate
6421: Note:
6422: See `MatZeroRows()` for details on how this routine operates.
6424: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6425: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `IS`
6426: @*/
6427: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6428: {
6429: PetscInt numRows = 0;
6430: const PetscInt *rows = NULL;
6432: PetscFunctionBegin;
6435: if (is) {
6437: PetscCall(ISGetLocalSize(is, &numRows));
6438: PetscCall(ISGetIndices(is, &rows));
6439: }
6440: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6441: if (is) PetscCall(ISRestoreIndices(is, &rows));
6442: PetscFunctionReturn(PETSC_SUCCESS);
6443: }
6445: /*@
6446: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6447: of a set of rows of a matrix indicated by a `MatStencil`. These rows must be local to the process.
6449: Collective
6451: Input Parameters:
6452: + mat - the matrix
6453: . numRows - the number of rows to remove
6454: . rows - the grid coordinates (and component number when dof > 1) for matrix rows indicated by an array of `MatStencil`
6455: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6456: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6457: - b - optional vector of right-hand side, that will be adjusted by provided solution
6459: Level: intermediate
6461: Notes:
6462: See `MatZeroRows()` for details on how this routine operates.
6464: The grid coordinates are across the entire grid, not just the local portion
6466: For periodic boundary conditions use negative indices for values to the left (below 0); these are
6467: obtained by wrapping values from the right edge. For values to the right of the last entry, use that index plus one,
6468: etc.; these are obtained by wrapping the values from the left edge. This does not work for anything but the
6469: `DM_BOUNDARY_PERIODIC` boundary type.
6471: For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
6472: a single value per point) you can skip filling those indices.
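  In C the stencil rows can be filled as in the following sketch; it assumes `mat` was obtained from `DMCreateMatrix()` on a 2d `DMDA` with one degree of freedom, and that `i` and `j` are global grid indices of boundary points owned by this process:
.vb
  MatStencil rows[2] = {{0}};   // zero-initialize the unused k and c slots

  rows[0].i = i;     rows[0].j = j;   // grid point (i, j)
  rows[1].i = i + 1; rows[1].j = j;   // grid point (i+1, j)
  PetscCall(MatZeroRowsStencil(mat, 2, rows, 1.0, NULL, NULL));
.ve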
6474: Fortran Note:
6475: `idxm` and `idxn` should be declared as
6476: .vb
6477: MatStencil idxm(4, m)
6478: .ve
6479: and the values inserted using
6480: .vb
6481: idxm(MatStencil_i, 1) = i
6482: idxm(MatStencil_j, 1) = j
6483: idxm(MatStencil_k, 1) = k
6484: idxm(MatStencil_c, 1) = c
6485: etc
6486: .ve
6488: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6489: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6490: @*/
6491: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6492: {
6493: PetscInt dim = mat->stencil.dim;
6494: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6495: PetscInt *dims = mat->stencil.dims + 1;
6496: PetscInt *starts = mat->stencil.starts;
6497: PetscInt *dxm = (PetscInt *)rows;
6498: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6500: PetscFunctionBegin;
6503: if (numRows) PetscAssertPointer(rows, 3);
6505: PetscCall(PetscMalloc1(numRows, &jdxm));
6506: for (i = 0; i < numRows; ++i) {
6507: /* Skip unused dimensions (they are ordered k, j, i, c) */
6508: for (j = 0; j < 3 - sdim; ++j) dxm++;
6509: /* Local index in X dir */
6510: tmp = *dxm++ - starts[0];
6511: /* Loop over remaining dimensions */
6512: for (j = 0; j < dim - 1; ++j) {
6513: /* If nonlocal, set index to be negative */
6514: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6515: /* Update local index */
6516: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6517: }
6518: /* Skip component slot if necessary */
6519: if (mat->stencil.noc) dxm++;
6520: /* Local row number */
6521: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6522: }
6523: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6524: PetscCall(PetscFree(jdxm));
6525: PetscFunctionReturn(PETSC_SUCCESS);
6526: }
6528: /*@
6529: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6530: of a set of rows and columns of a matrix.
6532: Collective
6534: Input Parameters:
6535: + mat - the matrix
6536: . numRows - the number of rows/columns to remove
6537: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6538: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6539: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6540: - b - optional vector of right-hand side, that will be adjusted by provided solution
6542: Level: intermediate
6544: Notes:
6545: See `MatZeroRowsColumns()` for details on how this routine operates.
6547: The grid coordinates are across the entire grid, not just the local portion
6549: For periodic boundary conditions use negative indices for values to the left (below 0); these are
6550: obtained by wrapping values from the right edge. For values to the right of the last entry, use that index plus one,
6551: etc.; these are obtained by wrapping the values from the left edge. This does not work for anything but the
6552: `DM_BOUNDARY_PERIODIC` boundary type.
6554: For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
6555: a single value per point) you can skip filling those indices.
6557: Fortran Note:
6558: `idxm` and `idxn` should be declared as
6559: .vb
6560: MatStencil idxm(4, m)
6561: .ve
6562: and the values inserted using
6563: .vb
6564: idxm(MatStencil_i, 1) = i
6565: idxm(MatStencil_j, 1) = j
6566: idxm(MatStencil_k, 1) = k
6567: idxm(MatStencil_c, 1) = c
6568: etc
6569: .ve
6571: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6572: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6573: @*/
6574: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6575: {
6576: PetscInt dim = mat->stencil.dim;
6577: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6578: PetscInt *dims = mat->stencil.dims + 1;
6579: PetscInt *starts = mat->stencil.starts;
6580: PetscInt *dxm = (PetscInt *)rows;
6581: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6583: PetscFunctionBegin;
6586: if (numRows) PetscAssertPointer(rows, 3);
6588: PetscCall(PetscMalloc1(numRows, &jdxm));
6589: for (i = 0; i < numRows; ++i) {
6590: /* Skip unused dimensions (they are ordered k, j, i, c) */
6591: for (j = 0; j < 3 - sdim; ++j) dxm++;
6592: /* Local index in X dir */
6593: tmp = *dxm++ - starts[0];
6594: /* Loop over remaining dimensions */
6595: for (j = 0; j < dim - 1; ++j) {
6596: /* If nonlocal, set index to be negative */
6597: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6598: /* Update local index */
6599: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6600: }
6601: /* Skip component slot if necessary */
6602: if (mat->stencil.noc) dxm++;
6603: /* Local row number */
6604: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6605: }
6606: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6607: PetscCall(PetscFree(jdxm));
6608: PetscFunctionReturn(PETSC_SUCCESS);
6609: }
6611: /*@
6612: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6613: of a set of rows of a matrix; using local numbering of rows.
6615: Collective
6617: Input Parameters:
6618: + mat - the matrix
6619: . numRows - the number of rows to remove
6620: . rows - the local row indices
6621: . diag - value put in all diagonals of eliminated rows
6622: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6623: - b - optional vector of right-hand side, that will be adjusted by provided solution
6625: Level: intermediate
6627: Notes:
6628: Before calling `MatZeroRowsLocal()`, the user must first set the
6629: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6631: See `MatZeroRows()` for details on how this routine operates.
6633: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6634: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6635: @*/
6636: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6637: {
6638: PetscFunctionBegin;
6641: if (numRows) PetscAssertPointer(rows, 3);
6642: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6643: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6644: MatCheckPreallocated(mat, 1);
6646: if (mat->ops->zerorowslocal) {
6647: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6648: } else {
6649: IS is, newis;
6650: PetscInt *newRows, nl = 0;
6652: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6653: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_USE_POINTER, &is));
6654: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6655: PetscCall(ISGetIndices(newis, (const PetscInt **)&newRows));
6656: for (PetscInt i = 0; i < numRows; i++)
6657: if (newRows[i] > -1) newRows[nl++] = newRows[i];
6658: PetscUseTypeMethod(mat, zerorows, nl, newRows, diag, x, b);
6659: PetscCall(ISRestoreIndices(newis, (const PetscInt **)&newRows));
6660: PetscCall(ISDestroy(&newis));
6661: PetscCall(ISDestroy(&is));
6662: }
6663: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6664: PetscFunctionReturn(PETSC_SUCCESS);
6665: }
6667: /*@
6668: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6669: of a set of rows of a matrix; using local numbering of rows.
6671: Collective
6673: Input Parameters:
6674: + mat - the matrix
6675: . is - index set of rows to remove
6676: . diag - value put in all diagonals of eliminated rows
6677: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6678: - b - optional vector of right-hand side, that will be adjusted by provided solution
6680: Level: intermediate
6682: Notes:
6683: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6684: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6686: See `MatZeroRows()` for details on how this routine operates.
6688: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6689: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6690: @*/
6691: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6692: {
6693: PetscInt numRows;
6694: const PetscInt *rows;
6696: PetscFunctionBegin;
6700: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6701: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6702: MatCheckPreallocated(mat, 1);
6704: PetscCall(ISGetLocalSize(is, &numRows));
6705: PetscCall(ISGetIndices(is, &rows));
6706: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6707: PetscCall(ISRestoreIndices(is, &rows));
6708: PetscFunctionReturn(PETSC_SUCCESS);
6709: }
6711: /*@
6712: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6713: of a set of rows and columns of a matrix; using local numbering of rows.
6715: Collective
6717: Input Parameters:
6718: + mat - the matrix
6719: . numRows - the number of rows to remove
6720: . rows - the local row indices
6721: . diag - value put in all diagonals of eliminated rows
6722: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6723: - b - optional vector of right-hand side, that will be adjusted by provided solution
6725: Level: intermediate
6727: Notes:
6728: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6729: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6731: See `MatZeroRowsColumns()` for details on how this routine operates.
6733: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6734: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6735: @*/
6736: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6737: {
6738: PetscFunctionBegin;
6741: if (numRows) PetscAssertPointer(rows, 3);
6742: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6743: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6744: MatCheckPreallocated(mat, 1);
6746: if (mat->ops->zerorowscolumnslocal) {
6747: PetscUseTypeMethod(mat, zerorowscolumnslocal, numRows, rows, diag, x, b);
6748: } else {
6749: IS is, newis;
6750: PetscInt *newRows, nl = 0;
6752: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6753: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_USE_POINTER, &is));
6754: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6755: PetscCall(ISGetIndices(newis, (const PetscInt **)&newRows));
6756: for (PetscInt i = 0; i < numRows; i++)
6757: if (newRows[i] > -1) newRows[nl++] = newRows[i];
6758: PetscUseTypeMethod(mat, zerorowscolumns, nl, newRows, diag, x, b);
6759: PetscCall(ISRestoreIndices(newis, (const PetscInt **)&newRows));
6760: PetscCall(ISDestroy(&newis));
6761: PetscCall(ISDestroy(&is));
6762: }
6763: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6764: PetscFunctionReturn(PETSC_SUCCESS);
6765: }
6767: /*@
6768: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6769: of a set of rows and columns of a matrix; using local numbering of rows.
6771: Collective
6773: Input Parameters:
6774: + mat - the matrix
6775: . is - index set of rows to remove
6776: . diag - value put in all diagonals of eliminated rows
6777: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6778: - b - optional vector of right-hand side, that will be adjusted by provided solution
6780: Level: intermediate
6782: Notes:
6783: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6784: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6786: See `MatZeroRowsColumns()` for details on how this routine operates.
6788: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6789: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6790: @*/
6791: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6792: {
6793: PetscInt numRows;
6794: const PetscInt *rows;
6796: PetscFunctionBegin;
6800: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6801: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6802: MatCheckPreallocated(mat, 1);
6804: PetscCall(ISGetLocalSize(is, &numRows));
6805: PetscCall(ISGetIndices(is, &rows));
6806: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6807: PetscCall(ISRestoreIndices(is, &rows));
6808: PetscFunctionReturn(PETSC_SUCCESS);
6809: }
6811: /*@
6812: MatGetSize - Returns the numbers of rows and columns in a matrix.
6814: Not Collective
6816: Input Parameter:
6817: . mat - the matrix
6819: Output Parameters:
6820: + m - the number of global rows
6821: - n - the number of global columns
6823: Level: beginner
6825: Note:
6826: Both output parameters can be `NULL` on input.
6828: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6829: @*/
6830: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6831: {
6832: PetscFunctionBegin;
6834: if (m) *m = mat->rmap->N;
6835: if (n) *n = mat->cmap->N;
6836: PetscFunctionReturn(PETSC_SUCCESS);
6837: }
6839: /*@
6840: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6841: of a matrix. For all matrices, this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6843: Not Collective
6845: Input Parameter:
6846: . mat - the matrix
6848: Output Parameters:
6849: + m - the number of local rows, use `NULL` to not obtain this value
6850: - n - the number of local columns, use `NULL` to not obtain this value
6852: Level: beginner
6854: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6855: @*/
6856: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6857: {
6858: PetscFunctionBegin;
6860: if (m) PetscAssertPointer(m, 2);
6861: if (n) PetscAssertPointer(n, 3);
6862: if (m) *m = mat->rmap->n;
6863: if (n) *n = mat->cmap->n;
6864: PetscFunctionReturn(PETSC_SUCCESS);
6865: }
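/*
  Illustrative usage sketch (not part of the PETSc source above): queries both the global and
  the local dimensions of a matrix and prints them from every process. The helper name
  ExampleReportSizes is hypothetical.
*/
static PetscErrorCode ExampleReportSizes(Mat mat)
{
  PetscInt M, N, m, n;

  PetscFunctionBegin;
  PetscCall(MatGetSize(mat, &M, &N));      /* global number of rows and columns */
  PetscCall(MatGetLocalSize(mat, &m, &n)); /* local sizes, i.e. those of the MatCreateVecs() vectors */
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "global %" PetscInt_FMT " x %" PetscInt_FMT ", local %" PetscInt_FMT " x %" PetscInt_FMT "\n", M, N, m, n));
  PetscFunctionReturn(PETSC_SUCCESS);
}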
6867: /*@
6868: MatGetOwnershipRangeColumn - Returns the range of matrix columns that correspond to the locally owned
6869: entries of a vector one multiplies this matrix by.
6871: Not Collective, unless matrix has not been allocated, then collective
6873: Input Parameter:
6874: . mat - the matrix
6876: Output Parameters:
6877: + m - the global index of the first local column, use `NULL` to not obtain this value
6878: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6880: Level: developer
6882: Notes:
6883: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6885: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6886: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6888: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6889: the local values in the matrix.
6891: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6892: Layouts](sec_matlayout) for details on matrix layouts.
6894: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6895: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6896: @*/
6897: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6898: {
6899: PetscFunctionBegin;
6902: if (m) PetscAssertPointer(m, 2);
6903: if (n) PetscAssertPointer(n, 3);
6904: MatCheckPreallocated(mat, 1);
6905: if (m) *m = mat->cmap->rstart;
6906: if (n) *n = mat->cmap->rend;
6907: PetscFunctionReturn(PETSC_SUCCESS);
6908: }
6910: /*@
6911: MatGetOwnershipRange - For matrices that own values by row, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6912: this MPI process.
6914: Not Collective
6916: Input Parameter:
6917: . mat - the matrix
6919: Output Parameters:
6920: + m - the global index of the first local row, use `NULL` to not obtain this value
6921: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6923: Level: beginner
6925: Notes:
6926: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6928: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6929: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6931: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6932: the local values in the matrix.
6934: The returned `n` is one more than the global index of the last row stored locally.
6936: For all matrices it returns the range of matrix rows associated with rows of a vector that
6937: would contain the result of a matrix vector product with this matrix. See [Matrix
6938: Layouts](sec_matlayout) for details on matrix layouts.
6940: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
6941: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6942: @*/
6943: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6944: {
6945: PetscFunctionBegin;
6948: if (m) PetscAssertPointer(m, 2);
6949: if (n) PetscAssertPointer(n, 3);
6950: MatCheckPreallocated(mat, 1);
6951: if (m) *m = mat->rmap->rstart;
6952: if (n) *n = mat->rmap->rend;
6953: PetscFunctionReturn(PETSC_SUCCESS);
6954: }
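/*
  Illustrative usage sketch (not part of the PETSc source above): the classic pattern of
  looping over the locally owned rows returned by MatGetOwnershipRange() to insert entries.
  The helper name ExampleAssembleIdentityPart is hypothetical, and the sketch assumes `mat`
  has been preallocated to hold at least its diagonal entries.
*/
static PetscErrorCode ExampleAssembleIdentityPart(Mat mat)
{
  PetscInt rstart, rend;

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
  /* each process touches only the rows it owns */
  for (PetscInt row = rstart; row < rend; row++) PetscCall(MatSetValue(mat, row, row, 1.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}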
6956: /*@C
6957: MatGetOwnershipRanges - For matrices that own values by row, excluding `MATELEMENTAL` and
6958: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6960: Not Collective, unless matrix has not been allocated
6962: Input Parameter:
6963: . mat - the matrix
6965: Output Parameter:
6966: . ranges - start of each process's portion plus one more than the total length at the end, of length `size` + 1
6967: where `size` is the number of MPI processes used by `mat`
6969: Level: beginner
6971: Notes:
6972: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6974: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6975: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6977: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6978: the local values in the matrix.
6980: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6981: would contain the result of a matrix vector product with this matrix. See [Matrix
6982: Layouts](sec_matlayout) for details on matrix layouts.
6984: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6985: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
6986: `DMDAGetGhostCorners()`, `DM`
6987: @*/
6988: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
6989: {
6990: PetscFunctionBegin;
6993: MatCheckPreallocated(mat, 1);
6994: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6995: PetscFunctionReturn(PETSC_SUCCESS);
6996: }
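/*
  Illustrative usage sketch (not part of the PETSc source above): prints the row layout of a
  matrix across all processes using the array returned by MatGetOwnershipRanges(), which has
  size + 1 entries. The helper name ExamplePrintRowLayout is hypothetical.
*/
static PetscErrorCode ExamplePrintRowLayout(Mat mat)
{
  const PetscInt *ranges;
  PetscMPIInt     size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  PetscCall(MatGetOwnershipRanges(mat, &ranges));
  /* PetscPrintf() prints from the first process of the communicator only */
  for (PetscMPIInt p = 0; p < size; p++) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "rank %d owns rows [%" PetscInt_FMT ", %" PetscInt_FMT ")\n", p, ranges[p], ranges[p + 1]));
  PetscFunctionReturn(PETSC_SUCCESS);
}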
6998: /*@C
6999: MatGetOwnershipRangesColumn - Returns, for each process, the range of matrix columns that correspond to that
7000: process's entries of a vector one multiplies this matrix by.
7002: Not Collective, unless matrix has not been allocated
7004: Input Parameter:
7005: . mat - the matrix
7007: Output Parameter:
7008: . ranges - start of each process's portion plus one more than the total length at the end
7010: Level: beginner
7012: Notes:
7013: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7015: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7016: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
7018: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7019: the local values in the matrix.
7021: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
7022: Layouts](sec_matlayout) for details on matrix layouts.
7024: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
7025: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
7026: `DMDAGetGhostCorners()`, `DM`
7027: @*/
7028: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
7029: {
7030: PetscFunctionBegin;
7033: MatCheckPreallocated(mat, 1);
7034: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
7035: PetscFunctionReturn(PETSC_SUCCESS);
7036: }
7038: /*@
7039: MatGetOwnershipIS - Gets the row and column ownership of a matrix's values as index sets.
7041: Not Collective
7043: Input Parameter:
7044: . A - matrix
7046: Output Parameters:
7047: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
7048: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
7050: Level: intermediate
7052: Note:
7053: You should call `ISDestroy()` on the returned index sets.
7055: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
7056: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
7057: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
7058: details on matrix layouts.
7060: .seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
7061: @*/
7062: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
7063: {
7064: PetscErrorCode (*f)(Mat, IS *, IS *);
7066: PetscFunctionBegin;
7069: MatCheckPreallocated(A, 1);
7070: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
7071: if (f) {
7072: PetscCall((*f)(A, rows, cols));
7073: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
7074: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
7075: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
7076: }
7077: PetscFunctionReturn(PETSC_SUCCESS);
7078: }
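/*
  Illustrative usage sketch (not part of the PETSc source above): obtains the row and column
  ownership index sets of a matrix, views the row ownership, and destroys both. The helper
  name ExampleViewOwnership is hypothetical.
*/
static PetscErrorCode ExampleViewOwnership(Mat A)
{
  IS rows, cols;

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipIS(A, &rows, &cols));
  PetscCall(ISView(rows, PETSC_VIEWER_STDOUT_SELF));
  /* the caller is responsible for destroying the returned index sets */
  PetscCall(ISDestroy(&rows));
  PetscCall(ISDestroy(&cols));
  PetscFunctionReturn(PETSC_SUCCESS);
}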
7080: /*@
7081: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix, storing the result in the factor matrix obtained with `MatGetFactor()`.
7082: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
7083: to complete the factorization.
7085: Collective
7087: Input Parameters:
7088: + fact - the factorized matrix obtained with `MatGetFactor()`
7089: . mat - the matrix
7090: . row - row permutation
7091: . col - column permutation
7092: - info - structure containing
7093: .vb
7094: levels - number of levels of fill.
7095: expected fill - as ratio of original fill.
7096: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
7097: missing diagonal entries)
7098: .ve
7100: Level: developer
7102: Notes:
7103: See [Matrix Factorization](sec_matfactor) for additional information.
7105: Most users should employ the `KSP` interface for linear solvers
7106: instead of working directly with matrix algebra routines such as this.
7107: See, e.g., `KSPCreate()`.
7109: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
7111: Fortran Note:
7112: A valid (non-null) `info` argument must be provided
7114: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
7115: `MatGetOrdering()`, `MatFactorInfo`
7116: @*/
7117: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7118: {
7119: PetscFunctionBegin;
7124: PetscAssertPointer(info, 5);
7125: PetscAssertPointer(fact, 1);
7126: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7127: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7128: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7129: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7130: MatCheckPreallocated(mat, 2);
7132: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7133: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7134: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7135: PetscFunctionReturn(PETSC_SUCCESS);
7136: }
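/*
  Illustrative usage sketch (not part of the PETSc source above): the symbolic/numeric ILU
  workflow around MatILUFactorSymbolic(), followed by a triangular solve. The helper name
  ExampleILUSolve is hypothetical, and the sketch assumes A is an assembled sequential
  MATAIJ matrix (ILU with the built-in MATSOLVERPETSC solver). Most users would instead go
  through KSP/PC, as the Notes above recommend.
*/
static PetscErrorCode ExampleILUSolve(Mat A, Vec b, Vec x)
{
  Mat           F;
  IS            row, col;
  MatFactorInfo info;

  PetscFunctionBegin;
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ILU, &F));
  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &row, &col));
  PetscCall(MatFactorInfoInitialize(&info));
  info.levels = 0;   /* ILU(0): no additional fill levels */
  info.fill   = 1.0; /* expected fill ratio relative to the original matrix */
  PetscCall(MatILUFactorSymbolic(F, A, row, col, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&row));
  PetscCall(ISDestroy(&col));
  PetscCall(MatDestroy(&F));
  PetscFunctionReturn(PETSC_SUCCESS);
}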
7138: /*@
7139: MatICCFactorSymbolic - Performs symbolic incomplete
7140: Cholesky factorization for a symmetric matrix. Use
7141: `MatCholeskyFactorNumeric()` to complete the factorization.
7143: Collective
7145: Input Parameters:
7146: + fact - the factorized matrix obtained with `MatGetFactor()`
7147: . mat - the matrix to be factored
7148: . perm - row and column permutation
7149: - info - structure containing
7150: .vb
7151: levels - number of levels of fill.
7152: expected fill - as ratio of original fill.
7153: .ve
7155: Level: developer
7157: Notes:
7158: Most users should employ the `KSP` interface for linear solvers
7159: instead of working directly with matrix algebra routines such as this.
7160: See, e.g., `KSPCreate()`.
7162: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7164: Fortran Note:
7165: A valid (non-null) `info` argument must be provided
7167: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7168: @*/
7169: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7170: {
7171: PetscFunctionBegin;
7175: PetscAssertPointer(info, 4);
7176: PetscAssertPointer(fact, 1);
7177: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7178: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7179: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7180: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7181: MatCheckPreallocated(mat, 2);
7183: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7184: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7185: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7186: PetscFunctionReturn(PETSC_SUCCESS);
7187: }
7189: /*@C
7190: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7191: points to an array of valid matrices, they may be reused to store the new
7192: submatrices.
7194: Collective
7196: Input Parameters:
7197: + mat - the matrix
7198: . n - the number of submatrices to be extracted (on this process; may be zero)
7199: . irow - index set of rows to extract
7200: . icol - index set of columns to extract
7201: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7203: Output Parameter:
7204: . submat - the array of submatrices
7206: Level: advanced
7208: Notes:
7209: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7210: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7211: to extract a parallel submatrix.
7213: Some matrix types place restrictions on the row and column
7214: indices, such as that they be sorted or that they be equal to each other.
7216: The index sets may not have duplicate entries.
7218: When extracting submatrices from a parallel matrix, each processor can
7219: form a different submatrix by setting the rows and columns of its
7220: individual index sets according to the local submatrix desired.
7222: When finished using the submatrices, the user should destroy
7223: them with `MatDestroySubMatrices()`.
7225: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7226: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7228: This routine creates the matrices in submat; you should NOT create them before
7229: calling it. It also allocates the array of matrix pointers submat.
7231: For `MATBAIJ` matrices the index sets must respect the block structure; that is, if they
7232: request one row/column in a block, they must request all rows/columns that are in
7233: that block. For example, if the block size is 2 you cannot request just row 0 and
7234: column 0.
7236: Fortran Note:
7237: .vb
7238: Mat, pointer :: submat(:)
7239: .ve
7241: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7242: @*/
7243: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7244: {
7245: PetscInt i;
7246: PetscBool eq;
7248: PetscFunctionBegin;
7251: if (n) {
7252: PetscAssertPointer(irow, 3);
7254: PetscAssertPointer(icol, 4);
7256: }
7257: PetscAssertPointer(submat, 6);
7258: if (n && scall == MAT_REUSE_MATRIX) {
7259: PetscAssertPointer(*submat, 6);
7261: }
7262: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7263: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7264: MatCheckPreallocated(mat, 1);
7265: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7266: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7267: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7268: for (i = 0; i < n; i++) {
7269: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7270: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7271: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7272: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7273: if (mat->boundtocpu && mat->bindingpropagates) {
7274: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7275: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7276: }
7277: #endif
7278: }
7279: PetscFunctionReturn(PETSC_SUCCESS);
7280: }
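/*
  Illustrative usage sketch (not part of the PETSc source above): each process extracts the
  sequential submatrix formed by its own rows and the matching columns, then frees it with
  MatDestroySubMatrices(). The helper name ExampleExtractLocalBlock is hypothetical.
*/
static PetscErrorCode ExampleExtractLocalBlock(Mat mat)
{
  PetscInt rstart, rend;
  IS       is;
  Mat     *submat;

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &is));
  /* submat is allocated by the call; do not create it beforehand */
  PetscCall(MatCreateSubMatrices(mat, 1, &is, &is, MAT_INITIAL_MATRIX, &submat));
  /* ... use the sequential matrix submat[0] ... */
  PetscCall(MatDestroySubMatrices(1, &submat));
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(PETSC_SUCCESS);
}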
7282: /*@C
7283: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of `mat` (by pairs of `IS` that may live on subcomms).
7285: Collective
7287: Input Parameters:
7288: + mat - the matrix
7289: . n - the number of submatrices to be extracted
7290: . irow - index set of rows to extract
7291: . icol - index set of columns to extract
7292: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7294: Output Parameter:
7295: . submat - the array of submatrices
7297: Level: advanced
7299: Note:
7300: This is used by `PCGASM`
7302: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7303: @*/
7304: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7305: {
7306: PetscInt i;
7307: PetscBool eq;
7309: PetscFunctionBegin;
7312: if (n) {
7313: PetscAssertPointer(irow, 3);
7315: PetscAssertPointer(icol, 4);
7317: }
7318: PetscAssertPointer(submat, 6);
7319: if (n && scall == MAT_REUSE_MATRIX) {
7320: PetscAssertPointer(*submat, 6);
7322: }
7323: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7324: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7325: MatCheckPreallocated(mat, 1);
7327: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7328: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7329: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7330: for (i = 0; i < n; i++) {
7331: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7332: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7333: }
7334: PetscFunctionReturn(PETSC_SUCCESS);
7335: }
7337: /*@C
7338: MatDestroyMatrices - Destroys an array of matrices
7340: Collective
7342: Input Parameters:
7343: + n - the number of local matrices
7344: - mat - the matrices (this is a pointer to the array of matrices)
7346: Level: advanced
7348: Notes:
7349: Frees not only the matrices, but also the array that contains the matrices
7351: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7353: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7354: @*/
7355: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7356: {
7357: PetscInt i;
7359: PetscFunctionBegin;
7360: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7361: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7362: PetscAssertPointer(mat, 2);
7364: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7366: /* memory is allocated even if n = 0 */
7367: PetscCall(PetscFree(*mat));
7368: PetscFunctionReturn(PETSC_SUCCESS);
7369: }
7371: /*@C
7372: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7374: Collective
7376: Input Parameters:
7377: + n - the number of local matrices
7378: - mat - the matrices (this is a pointer to the array of matrices, to match the calling sequence of `MatCreateSubMatrices()`)
7380: Level: advanced
7382: Note:
7383: Frees not only the matrices, but also the array that contains the matrices
7385: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7386: @*/
7387: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7388: {
7389: Mat mat0;
7391: PetscFunctionBegin;
7392: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7393: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7394: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7395: PetscAssertPointer(mat, 2);
7397: mat0 = (*mat)[0];
7398: if (mat0 && mat0->ops->destroysubmatrices) {
7399: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7400: } else {
7401: PetscCall(MatDestroyMatrices(n, mat));
7402: }
7403: PetscFunctionReturn(PETSC_SUCCESS);
7404: }
7406: /*@
7407: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7409: Collective
7411: Input Parameter:
7412: . mat - the matrix
7414: Output Parameter:
7415: . matstruct - the sequential matrix with the nonzero structure of `mat`
7417: Level: developer
7419: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7420: @*/
7421: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7422: {
7423: PetscFunctionBegin;
7425: PetscAssertPointer(matstruct, 2);
7428: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7429: MatCheckPreallocated(mat, 1);
7431: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7432: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7433: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7434: PetscFunctionReturn(PETSC_SUCCESS);
7435: }
7437: /*@C
7438: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7440: Collective
7442: Input Parameter:
7443: . mat - the matrix
7445: Level: advanced
7447: Note:
7448: This is not needed; one can just call `MatDestroy()`
7450: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7451: @*/
7452: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7453: {
7454: PetscFunctionBegin;
7455: PetscAssertPointer(mat, 1);
7456: PetscCall(MatDestroy(mat));
7457: PetscFunctionReturn(PETSC_SUCCESS);
7458: }
7460: /*@
7461: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7462: replaces the index sets by larger ones that represent submatrices with
7463: additional overlap.
7465: Collective
7467: Input Parameters:
7468: + mat - the matrix
7469: . n - the number of index sets
7470: . is - the array of index sets (these index sets will be changed during the call)
7471: - ov - the additional overlap requested
7473: Options Database Key:
7474: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7476: Level: developer
7478: Note:
7479: The computed overlap preserves the matrix block sizes when the blocks are square.
7480: That is, if a matrix nonzero for a given block would increase the overlap, all columns associated with
7481: that block are included in the overlap, regardless of whether each specific column would increase the overlap.
7483: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7484: @*/
7485: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7486: {
7487: PetscInt i, bs, cbs;
7489: PetscFunctionBegin;
7493: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7494: if (n) {
7495: PetscAssertPointer(is, 3);
7497: }
7498: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7499: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7500: MatCheckPreallocated(mat, 1);
7502: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7503: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7504: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7505: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7506: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7507: if (bs == cbs) {
7508: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7509: }
7510: PetscFunctionReturn(PETSC_SUCCESS);
7511: }
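/*
  Illustrative usage sketch (not part of the PETSc source above): grows the locally owned
  row set by `ov` levels of overlap and then extracts the corresponding sequential block,
  mirroring the pattern used by PCASM. The helper name ExampleOverlappingBlock is
  hypothetical; the IS is sorted before extraction because some matrix types require
  sorted indices.
*/
static PetscErrorCode ExampleOverlappingBlock(Mat mat, PetscInt ov)
{
  PetscInt rstart, rend;
  IS       is;
  Mat     *submat;

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &is));
  PetscCall(MatIncreaseOverlap(mat, 1, &is, ov)); /* enlarge the index set in place */
  PetscCall(ISSort(is));
  PetscCall(MatCreateSubMatrices(mat, 1, &is, &is, MAT_INITIAL_MATRIX, &submat));
  /* ... use submat[0] as the local overlapping block ... */
  PetscCall(MatDestroySubMatrices(1, &submat));
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(PETSC_SUCCESS);
}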
7513: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7515: /*@
7516: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7517: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7518: additional overlap.
7520: Collective
7522: Input Parameters:
7523: + mat - the matrix
7524: . n - the number of index sets
7525: . is - the array of index sets (these index sets will be changed during the call)
7526: - ov - the additional overlap requested
7528: Options Database Key:
7529: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7531: Level: developer
7533: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7534: @*/
7535: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7536: {
7537: PetscInt i;
7539: PetscFunctionBegin;
7542: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7543: if (n) {
7544: PetscAssertPointer(is, 3);
7546: }
7547: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7548: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7549: MatCheckPreallocated(mat, 1);
7550: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7551: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7552: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7553: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7554: PetscFunctionReturn(PETSC_SUCCESS);
7555: }
7557: /*@
7558: MatGetBlockSize - Returns the matrix block size.
7560: Not Collective
7562: Input Parameter:
7563: . mat - the matrix
7565: Output Parameter:
7566: . bs - block size
7568: Level: intermediate
7570: Notes:
7571: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7573: If the block size has not been set yet, this routine returns 1.
7575: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7576: @*/
7577: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7578: {
7579: PetscFunctionBegin;
7581: PetscAssertPointer(bs, 2);
7582: *bs = mat->rmap->bs;
7583: PetscFunctionReturn(PETSC_SUCCESS);
7584: }
7586: /*@
7587: MatGetBlockSizes - Returns the matrix block row and column sizes.
7589: Not Collective
7591: Input Parameter:
7592: . mat - the matrix
7594: Output Parameters:
7595: + rbs - row block size
7596: - cbs - column block size
7598: Level: intermediate
7600: Notes:
7601: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7602: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7604: If a block size has not been set yet, this routine returns 1.
7606: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7607: @*/
7608: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7609: {
7610: PetscFunctionBegin;
7612: if (rbs) PetscAssertPointer(rbs, 2);
7613: if (cbs) PetscAssertPointer(cbs, 3);
7614: if (rbs) *rbs = mat->rmap->bs;
7615: if (cbs) *cbs = mat->cmap->bs;
7616: PetscFunctionReturn(PETSC_SUCCESS);
7617: }
7619: /*@
7620: MatSetBlockSize - Sets the matrix block size.
7622: Logically Collective
7624: Input Parameters:
7625: + mat - the matrix
7626: - bs - block size
7628: Level: intermediate
7630: Notes:
7631: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7632: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or the block size will default to 1), and the block size cannot be changed later.
7634: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7635: is compatible with the matrix local sizes.
7637: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7638: @*/
7639: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7640: {
7641: PetscFunctionBegin;
7644: PetscCall(MatSetBlockSizes(mat, bs, bs));
7645: PetscFunctionReturn(PETSC_SUCCESS);
7646: }
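/*
  Illustrative usage sketch (not part of the PETSc source above): creates a small sequential
  AIJ matrix and sets a block size of 2 before MatSetUp(), as the Notes above require. The
  helper name ExampleCreateBlockedAIJ and the size parameter nblocks are hypothetical.
*/
static PetscErrorCode ExampleCreateBlockedAIJ(PetscInt nblocks, Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(PETSC_COMM_SELF, A));
  PetscCall(MatSetSizes(*A, 2 * nblocks, 2 * nblocks, 2 * nblocks, 2 * nblocks));
  PetscCall(MatSetType(*A, MATAIJ));
  PetscCall(MatSetBlockSize(*A, 2)); /* must precede MatSetUp()/preallocation; otherwise defaults to 1 */
  PetscCall(MatSetUp(*A));
  PetscFunctionReturn(PETSC_SUCCESS);
}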
7648: typedef struct {
7649: PetscInt n;
7650: IS *is;
7651: Mat *mat;
7652: PetscObjectState nonzerostate;
7653: Mat C;
7654: } EnvelopeData;
7656: static PetscErrorCode EnvelopeDataDestroy(void **ptr)
7657: {
7658: EnvelopeData *edata = (EnvelopeData *)*ptr;
7660: PetscFunctionBegin;
7661: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7662: PetscCall(PetscFree(edata->is));
7663: PetscCall(PetscFree(edata));
7664: PetscFunctionReturn(PETSC_SUCCESS);
7665: }
7667: /*@
7668: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal, this computes and stores
7669: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7671: Collective
7673: Input Parameter:
7674: . mat - the matrix
7676: Level: intermediate
7678: Notes:
7679: There can be zeros within the blocks
7681: The blocks can overlap between processes, including lying on more than two processes
7683: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7684: @*/
7685: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7686: {
7687: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7688: PetscInt *diag, *odiag, sc;
7689: VecScatter scatter;
7690: PetscScalar *seqv;
7691: const PetscScalar *parv;
7692: const PetscInt *ia, *ja;
7693: PetscBool set, flag, done;
7694: Mat AA = mat, A;
7695: MPI_Comm comm;
7696: PetscMPIInt rank, size, tag;
7697: MPI_Status status;
7698: PetscContainer container;
7699: EnvelopeData *edata;
7700: Vec seq, par;
7701: IS isglobal;
7703: PetscFunctionBegin;
7705: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7706: if (!set || !flag) {
7707: /* TODO: only needs nonzero structure of transpose */
7708: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7709: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7710: }
7711: PetscCall(MatAIJGetLocalMat(AA, &A));
7712: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7713: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7715: PetscCall(MatGetLocalSize(mat, &n, NULL));
7716: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7717: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7718: PetscCallMPI(MPI_Comm_size(comm, &size));
7719: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7721: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7723: if (rank > 0) {
7724: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7725: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7726: }
7727: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7728: for (i = 0; i < n; i++) {
7729: env = PetscMax(env, ja[ia[i + 1] - 1]);
7730: II = rstart + i;
7731: if (env == II) {
7732: starts[lblocks] = tbs;
7733: sizes[lblocks++] = 1 + II - tbs;
7734: tbs = 1 + II;
7735: }
7736: }
7737: if (rank < size - 1) {
7738: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7739: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7740: }
7742: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7743: if (!set || !flag) PetscCall(MatDestroy(&AA));
7744: PetscCall(MatDestroy(&A));
7746: PetscCall(PetscNew(&edata));
7747: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7748: edata->n = lblocks;
7749: /* create IS needed for extracting blocks from the original matrix */
7750: PetscCall(PetscMalloc1(lblocks, &edata->is));
7751: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7753: /* Create the resulting inverse matrix nonzero structure with preallocation information */
7754: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7755: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7756: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7757: PetscCall(MatSetType(edata->C, MATAIJ));
7759: /* Communicate the start and end of each row, from each block to the correct rank */
7760: /* TODO: Use PetscSF instead of VecScatter */
7761: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7762: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7763: PetscCall(VecGetArrayWrite(seq, &seqv));
7764: for (PetscInt i = 0; i < lblocks; i++) {
7765: for (PetscInt j = 0; j < sizes[i]; j++) {
7766: seqv[cnt] = starts[i];
7767: seqv[cnt + 1] = starts[i] + sizes[i];
7768: cnt += 2;
7769: }
7770: }
7771: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7772: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7773: sc -= cnt;
7774: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7775: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7776: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7777: PetscCall(ISDestroy(&isglobal));
7778: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7779: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7780: PetscCall(VecScatterDestroy(&scatter));
7781: PetscCall(VecDestroy(&seq));
7782: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7783: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7784: PetscCall(VecGetArrayRead(par, &parv));
7785: cnt = 0;
7786: PetscCall(MatGetSize(mat, NULL, &n));
7787: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7788: PetscInt start, end, d = 0, od = 0;
7790: start = (PetscInt)PetscRealPart(parv[cnt]);
7791: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7792: cnt += 2;
7794: if (start < cstart) {
7795: od += cstart - start + n - cend;
7796: d += cend - cstart;
7797: } else if (start < cend) {
7798: od += n - cend;
7799: d += cend - start;
7800: } else od += n - start;
7801: if (end <= cstart) {
7802: od -= cstart - end + n - cend;
7803: d -= cend - cstart;
7804: } else if (end < cend) {
7805: od -= n - cend;
7806: d -= cend - end;
7807: } else od -= n - end;
7809: odiag[i] = od;
7810: diag[i] = d;
7811: }
7812: PetscCall(VecRestoreArrayRead(par, &parv));
7813: PetscCall(VecDestroy(&par));
7814: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7815: PetscCall(PetscFree2(diag, odiag));
7816: PetscCall(PetscFree2(sizes, starts));
7818: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7819: PetscCall(PetscContainerSetPointer(container, edata));
7820: PetscCall(PetscContainerSetCtxDestroy(container, EnvelopeDataDestroy));
7821: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7822: PetscCall(PetscObjectDereference((PetscObject)container));
7823: PetscFunctionReturn(PETSC_SUCCESS);
7824: }
7826: /*@
7827: MatInvertVariableBlockEnvelope - Sets matrix `C` to be the inverted block diagonal of matrix `A`
7829: Collective
7831: Input Parameters:
7832: + A - the matrix
7833: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7835: Output Parameter:
7836: . C - matrix with inverted block diagonal of `A`
7838: Level: advanced
7840: Note:
7841: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7843: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7844: @*/
7845: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7846: {
7847: PetscContainer container;
7848: EnvelopeData *edata;
7849: PetscObjectState nonzerostate;
7851: PetscFunctionBegin;
7852: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7853: if (!container) {
7854: PetscCall(MatComputeVariableBlockEnvelope(A));
7855: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7856: }
7857: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7858: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7859: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7860: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7862: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7863: *C = edata->C;
7865: for (PetscInt i = 0; i < edata->n; i++) {
7866: Mat D;
7867: PetscScalar *dvalues;
7869: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7870: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7871: PetscCall(MatSeqDenseInvert(D));
7872: PetscCall(MatDenseGetArray(D, &dvalues));
7873: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7874: PetscCall(MatDestroy(&D));
7875: }
7876: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7877: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7878: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7879: PetscFunctionReturn(PETSC_SUCCESS);
7880: }
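/*
  Illustrative usage sketch (not part of the PETSc source above): applies the inverted block
  diagonal of A to a vector. The helper name ExampleApplyBlockInverse is hypothetical. On the
  first call MAT_INITIAL_MATRIX is used; later calls would pass MAT_REUSE_MATRIX with the same
  C, as checked by the routine above. Error handling and the eventual destruction of C are
  omitted from this sketch.
*/
static PetscErrorCode ExampleApplyBlockInverse(Mat A, Vec b, Vec x)
{
  Mat C;

  PetscFunctionBegin;
  PetscCall(MatInvertVariableBlockEnvelope(A, MAT_INITIAL_MATRIX, &C));
  PetscCall(MatMult(C, b, x)); /* x = (block diagonal of A)^{-1} b */
  PetscFunctionReturn(PETSC_SUCCESS);
}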
7882: /*@
7883: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7885: Not Collective
7887: Input Parameters:
7888: + mat - the matrix
7889: . nblocks - the number of blocks on this process; each block can only exist on a single process
7890: - bsizes - the block sizes
7892: Level: intermediate
7894: Notes:
7895: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7897: Each variable point-block set of degrees of freedom must live on a single MPI process. That is, a point block cannot straddle two MPI processes.
7899: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7900: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7901: @*/
7902: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
7903: {
7904: PetscInt ncnt = 0, nlocal;
7906: PetscFunctionBegin;
7908: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7909: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
7910: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
7911: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7912: PetscCall(PetscFree(mat->bsizes));
7913: mat->nblocks = nblocks;
7914: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7915: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7916: PetscFunctionReturn(PETSC_SUCCESS);
7917: }
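/*
  Illustrative usage sketch (not part of the PETSc source above): declares three variable
  diagonal blocks and reads them back. The helper name ExampleSetVariableBlocks and the block
  sizes {3, 2, 4} are hypothetical; the sizes must sum to the local row count (9 here), and
  this information is what PCVPBJACOBI uses for MATAIJ matrices.
*/
static PetscErrorCode ExampleSetVariableBlocks(Mat mat)
{
  const PetscInt  bsizes[] = {3, 2, 4}; /* hypothetical local point-block sizes */
  PetscInt        nblocks;
  const PetscInt *stored;

  PetscFunctionBegin;
  PetscCall(MatSetVariableBlockSizes(mat, 3, bsizes));
  PetscCall(MatGetVariableBlockSizes(mat, &nblocks, &stored)); /* read back what was stored */
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "stored %" PetscInt_FMT " variable blocks\n", nblocks));
  PetscFunctionReturn(PETSC_SUCCESS);
}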
7919: /*@C
7920: MatGetVariableBlockSizes - Gets the diagonal blocks of the matrix, which need not be of the same size
7922: Not Collective; No Fortran Support
7924: Input Parameter:
7925: . mat - the matrix
7927: Output Parameters:
7928: + nblocks - the number of blocks on this process
7929: - bsizes - the block sizes
7931: Level: intermediate
7933: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7934: @*/
7935: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
7936: {
7937: PetscFunctionBegin;
7939: if (nblocks) *nblocks = mat->nblocks;
7940: if (bsizes) *bsizes = mat->bsizes;
7941: PetscFunctionReturn(PETSC_SUCCESS);
7942: }
7944: /*@
7945: MatSelectVariableBlockSizes - When creating a submatrix, passes on the variable block sizes from the original matrix
7947: Not Collective
7949: Input Parameters:
7950: + subA - the submatrix
7951: . A - the original matrix
7952: - isrow - The `IS` of selected rows for the submatrix, must be sorted
7954: Level: developer
7956: Notes:
7957: If the index set is not sorted or contains off-process entries, this function will do nothing.
7959: .seealso: [](ch_matrices), `Mat`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7960: @*/
7961: PetscErrorCode MatSelectVariableBlockSizes(Mat subA, Mat A, IS isrow)
7962: {
7963: const PetscInt *rows;
7964: PetscInt n, rStart, rEnd, Nb = 0;
7965: PetscBool flg = A->bsizes ? PETSC_TRUE : PETSC_FALSE;
7967: PetscFunctionBegin;
7968: // The code for block size extraction does not support an unsorted IS
7969: if (flg) PetscCall(ISSorted(isrow, &flg));
7970: // We don't support originally off-diagonal blocks
7971: if (flg) {
7972: PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
7973: PetscCall(ISGetLocalSize(isrow, &n));
7974: PetscCall(ISGetIndices(isrow, &rows));
7975: for (PetscInt i = 0; i < n && flg; ++i) {
7976: if (rows[i] < rStart || rows[i] >= rEnd) flg = PETSC_FALSE;
7977: }
7978: PetscCall(ISRestoreIndices(isrow, &rows));
7979: }
7980: // quiet return if we can't extract block size
7981: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &flg, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)subA)));
7982: if (!flg) PetscFunctionReturn(PETSC_SUCCESS);
7984: // extract block sizes
7985: PetscCall(ISGetIndices(isrow, &rows));
7986: for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) {
7987: PetscBool occupied = PETSC_FALSE;
7989: for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
7990: const PetscInt row = gr + br;
7992: if (i == n) break;
7993: if (rows[i] == row) {
7994: occupied = PETSC_TRUE;
7995: ++i;
7996: }
7997: while (i < n && rows[i] < row) ++i;
7998: }
7999: gr += A->bsizes[b];
8000: if (occupied) ++Nb;
8001: }
8002: subA->nblocks = Nb;
8003: PetscCall(PetscFree(subA->bsizes));
8004: PetscCall(PetscMalloc1(subA->nblocks, &subA->bsizes));
8005: PetscInt sb = 0;
8006: for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) {
8007: if (sb < subA->nblocks) subA->bsizes[sb] = 0;
8008: for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
8009: const PetscInt row = gr + br;
8011: if (i == n) break;
8012: if (rows[i] == row) {
8013: ++subA->bsizes[sb];
8014: ++i;
8015: }
8016: while (i < n && rows[i] < row) ++i;
8017: }
8018: gr += A->bsizes[b];
8019: if (sb < subA->nblocks && subA->bsizes[sb]) ++sb;
8020: }
8021: PetscCheck(sb == subA->nblocks, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of blocks %" PetscInt_FMT " != %" PetscInt_FMT, sb, subA->nblocks);
8022: PetscInt nlocal, ncnt = 0;
8023: PetscCall(MatGetLocalSize(subA, &nlocal, NULL));
8024: PetscCheck(subA->nblocks >= 0 && subA->nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", subA->nblocks, nlocal);
8025: for (PetscInt i = 0; i < subA->nblocks; i++) ncnt += subA->bsizes[i];
8026: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
8027: PetscCall(ISRestoreIndices(isrow, &rows));
8028: PetscFunctionReturn(PETSC_SUCCESS);
8029: }
8031: /*@
8032: MatSetBlockSizes - Sets the matrix block row and column sizes.
8034: Logically Collective
8036: Input Parameters:
8037: + mat - the matrix
8038: . rbs - row block size
8039: - cbs - column block size
8041: Level: intermediate
8043: Notes:
8044: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
8045: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
8046: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
8048: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
8049: are compatible with the matrix local sizes.
8051: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
8053: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
8054: @*/
8055: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
8056: {
8057: PetscFunctionBegin;
8061: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
8062: if (mat->rmap->refcnt) {
8063: ISLocalToGlobalMapping l2g = NULL;
8064: PetscLayout nmap = NULL;
8066: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
8067: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
8068: PetscCall(PetscLayoutDestroy(&mat->rmap));
8069: mat->rmap = nmap;
8070: mat->rmap->mapping = l2g;
8071: }
8072: if (mat->cmap->refcnt) {
8073: ISLocalToGlobalMapping l2g = NULL;
8074: PetscLayout nmap = NULL;
8076: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
8077: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
8078: PetscCall(PetscLayoutDestroy(&mat->cmap));
8079: mat->cmap = nmap;
8080: mat->cmap->mapping = l2g;
8081: }
8082: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
8083: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
8084: PetscFunctionReturn(PETSC_SUCCESS);
8085: }
8087: /*@
8088: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
8090: Logically Collective
8092: Input Parameters:
8093: + mat - the matrix
8094: . fromRow - matrix from which to copy row block size
8095: - fromCol - matrix from which to copy column block size (can be same as fromRow)
8097: Level: developer
8099: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
8100: @*/
8101: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
8102: {
8103: PetscFunctionBegin;
8107: PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
8108: PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
8109: PetscFunctionReturn(PETSC_SUCCESS);
8110: }
8112: /*@
8113: MatResidual - Default routine to calculate the residual r = b - Ax
8115: Collective
8117: Input Parameters:
8118: + mat - the matrix
8119: . b - the right-hand-side
8120: - x - the approximate solution
8122: Output Parameter:
8123: . r - location to store the residual
8125: Level: developer
8127: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8128: @*/
8129: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8130: {
8131: PetscFunctionBegin;
8137: MatCheckPreallocated(mat, 1);
8138: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8139: if (!mat->ops->residual) {
8140: PetscCall(MatMult(mat, x, r));
8141: PetscCall(VecAYPX(r, -1.0, b));
8142: } else {
8143: PetscUseTypeMethod(mat, residual, b, x, r);
8144: }
8145: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8146: PetscFunctionReturn(PETSC_SUCCESS);
8147: }
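/*
  Illustrative usage sketch (not part of the PETSc source above): computes the 2-norm of the
  residual r = b - A x using MatResidual(). The helper name ExampleResidualNorm is hypothetical.
*/
static PetscErrorCode ExampleResidualNorm(Mat A, Vec b, Vec x, PetscReal *norm)
{
  Vec r;

  PetscFunctionBegin;
  PetscCall(VecDuplicate(b, &r));
  PetscCall(MatResidual(A, b, x, r)); /* r = b - A x */
  PetscCall(VecNorm(r, NORM_2, norm));
  PetscCall(VecDestroy(&r));
  PetscFunctionReturn(PETSC_SUCCESS);
}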
8149: /*@C
8150: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8152: Collective
8154: Input Parameters:
8155: + mat - the matrix
8156: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8157: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8158: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8159: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8160: always used.
8162: Output Parameters:
8163: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8164: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8165: . ja - the column indices, use `NULL` if not needed
8166: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8167: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8169: Level: developer
8171: Notes:
8172: You CANNOT change any of the ia[] or ja[] values.
8174: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
8176: Fortran Notes:
8177: Use
8178: .vb
8179: PetscInt, pointer :: ia(:),ja(:)
8180: call MatGetRowIJ(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8181: ! Access the ith and jth entries via ia(i) and ja(j)
8182: .ve
8184: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8185: @*/
8186: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8187: {
8188: PetscFunctionBegin;
8191: if (n) PetscAssertPointer(n, 5);
8192: if (ia) PetscAssertPointer(ia, 6);
8193: if (ja) PetscAssertPointer(ja, 7);
8194: if (done) PetscAssertPointer(done, 8);
8195: MatCheckPreallocated(mat, 1);
8196: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8197: else {
8198: if (done) *done = PETSC_TRUE;
8199: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8200: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8201: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8202: }
8203: PetscFunctionReturn(PETSC_SUCCESS);
8204: }
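/*
  Illustrative usage sketch (not part of the PETSc source above): counts the stored entries of
  a sequential AIJ matrix from the ia[] row pointers returned by MatGetRowIJ(), then restores
  them. The helper name ExampleCountLocalNonzeros is hypothetical; `done` must be checked
  because not every matrix type provides this access.
*/
static PetscErrorCode ExampleCountLocalNonzeros(Mat A, PetscInt *nz)
{
  PetscInt        n;
  const PetscInt *ia, *ja;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
  PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Matrix type does not provide row IJ access");
  *nz = ia[n]; /* the last row pointer is the total number of stored entries */
  PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
  PetscFunctionReturn(PETSC_SUCCESS);
}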
8206: /*@C
8207: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8209: Collective
8211: Input Parameters:
8212: + mat - the matrix
8213: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8214: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8215: symmetrized
8216: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8217: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8218: always used.
8220: Output Parameters:
8221: + n - number of columns in the (possibly compressed) matrix
8222: . ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that column of the matrix
8223: . ja - the row indices
8224: - done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8226: Level: developer
8228: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8229: @*/
8230: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8231: {
8232: PetscFunctionBegin;
8235: PetscAssertPointer(n, 5);
8236: if (ia) PetscAssertPointer(ia, 6);
8237: if (ja) PetscAssertPointer(ja, 7);
8238: PetscAssertPointer(done, 8);
8239: MatCheckPreallocated(mat, 1);
8240: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8241: else {
8242: *done = PETSC_TRUE;
8243: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8244: }
8245: PetscFunctionReturn(PETSC_SUCCESS);
8246: }
8248: /*@C
8249: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8251: Collective
8253: Input Parameters:
8254: + mat - the matrix
8255: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8256: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8257: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8258: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8259: always used.
8260: . n - size of (possibly compressed) matrix
8261: . ia - the row pointers
8262: - ja - the column indices
8264: Output Parameter:
8265: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8267: Level: developer
8269: Note:
8270: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8271: use of the arrays after they have been restored. If you pass `NULL`, it will
8272: not zero the pointers. Use of `ia` or `ja` after `MatRestoreRowIJ()` is invalid.
8274: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8275: @*/
8276: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8277: {
8278: PetscFunctionBegin;
8281: if (ia) PetscAssertPointer(ia, 6);
8282: if (ja) PetscAssertPointer(ja, 7);
8283: if (done) PetscAssertPointer(done, 8);
8284: MatCheckPreallocated(mat, 1);
8286: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8287: else {
8288: if (done) *done = PETSC_TRUE;
8289: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8290: if (n) *n = 0;
8291: if (ia) *ia = NULL;
8292: if (ja) *ja = NULL;
8293: }
8294: PetscFunctionReturn(PETSC_SUCCESS);
8295: }
8297: /*@C
8298: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8300: Collective
8302: Input Parameters:
8303: + mat - the matrix
8304: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8305: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8306: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8307: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8308: always used.
8310: Output Parameters:
8311: + n - size of (possibly compressed) matrix
8312: . ia - the column pointers
8313: . ja - the row indices
8314: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8316: Level: developer
8318: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8319: @*/
8320: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8321: {
8322: PetscFunctionBegin;
8325: if (ia) PetscAssertPointer(ia, 6);
8326: if (ja) PetscAssertPointer(ja, 7);
8327: PetscAssertPointer(done, 8);
8328: MatCheckPreallocated(mat, 1);
8330: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8331: else {
8332: *done = PETSC_TRUE;
8333: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8334: if (n) *n = 0;
8335: if (ia) *ia = NULL;
8336: if (ja) *ja = NULL;
8337: }
8338: PetscFunctionReturn(PETSC_SUCCESS);
8339: }
8341: /*@
8342: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8343: `MatGetColumnIJ()`.
8345: Collective
8347: Input Parameters:
8348: + mat - the matrix
8349: . ncolors - maximum color value
8350: . n - number of entries in colorarray
8351: - colorarray - array indicating color for each column
8353: Output Parameter:
8354: . iscoloring - coloring generated using colorarray information
8356: Level: developer
8358: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8359: @*/
8360: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8361: {
8362: PetscFunctionBegin;
8365: PetscAssertPointer(colorarray, 4);
8366: PetscAssertPointer(iscoloring, 5);
8367: MatCheckPreallocated(mat, 1);
8369: if (!mat->ops->coloringpatch) {
8370: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8371: } else {
8372: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8373: }
8374: PetscFunctionReturn(PETSC_SUCCESS);
8375: }
8377: /*@
8378: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8380: Logically Collective
8382: Input Parameter:
8383: . mat - the factored matrix to be reset
8385: Level: developer
8387: Notes:
8388: This routine should be used only with factored matrices formed by in-place
8389: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8390: format). This option can save memory, for example, when solving nonlinear
8391: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8392: ILU(0) preconditioner.
8394: One can specify in-place ILU(0) factorization by calling
8395: .vb
8396: PCSetType(pc, PCILU);
8397: PCFactorSetUseInPlace(pc, PETSC_TRUE);
8398: .ve
8399: or by using the options -pc_type ilu -pc_factor_in_place
8401: In-place factorization ILU(0) can also be used as a local
8402: solver for the blocks within the block Jacobi or additive Schwarz
8403: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8404: for details on setting local solver options.
8406: Most users should employ the `KSP` interface for linear solvers
8407: instead of working directly with matrix algebra routines such as this.
8408: See, e.g., `KSPCreate()`.
8410: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8411: @*/
8412: PetscErrorCode MatSetUnfactored(Mat mat)
8413: {
8414: PetscFunctionBegin;
8417: MatCheckPreallocated(mat, 1);
8418: mat->factortype = MAT_FACTOR_NONE;
8419: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8420: PetscUseTypeMethod(mat, setunfactored);
8421: PetscFunctionReturn(PETSC_SUCCESS);
8422: }
8424: /*@
8425: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8426: as the original matrix.
8428: Collective
8430: Input Parameters:
8431: + mat - the original matrix
8432: . isrow - parallel `IS` containing the rows this processor should obtain
8433: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" in the new matrix.
8434: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8436: Output Parameter:
8437: . newmat - the new submatrix, of the same type as the original matrix
8439: Level: advanced
8441: Notes:
8442: The submatrix will be able to be multiplied with vectors using the same layout as `iscol`.
8444: Some matrix types place restrictions on the row and column indices, such
8445: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8446: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8448: The index sets may not have duplicate entries.
8450: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`;
8451: the `MatCreateSubMatrix()` routine will create `newmat` for you. Any additional calls
8452: to this routine with a mat of the same nonzero structure and with a call of `MAT_REUSE_MATRIX`
8453: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8454: you are finished using it.
8456: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8457: the input matrix.
8459: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8461: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8462: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8464: Example usage:
8465: Consider the following 8x8 matrix with 34 non-zero values, that is
8466: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8467: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8468: as follows
8469: .vb
8470: 1 2 0 | 0 3 0 | 0 4
8471: Proc0 0 5 6 | 7 0 0 | 8 0
8472: 9 0 10 | 11 0 0 | 12 0
8473: -------------------------------------
8474: 13 0 14 | 15 16 17 | 0 0
8475: Proc1 0 18 0 | 19 20 21 | 0 0
8476: 0 0 0 | 22 23 0 | 24 0
8477: -------------------------------------
8478: Proc2 25 26 27 | 0 0 28 | 29 0
8479: 30 0 0 | 31 32 33 | 0 34
8480: .ve
8482: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8484: .vb
8485: 2 0 | 0 3 0 | 0
8486: Proc0 5 6 | 7 0 0 | 8
8487: -------------------------------
8488: Proc1 18 0 | 19 20 21 | 0
8489: -------------------------------
8490: Proc2 26 27 | 0 0 28 | 29
8491: 0 0 | 31 32 33 | 0
8492: .ve
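   A corresponding call sequence is sketched below; `nrows`, `rows`, `ncols`, and `cols` are illustrative per-process counts and index arrays chosen as described above.
.vb
  IS  isrow, iscol;
  Mat sub;

  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), nrows, rows, PETSC_COPY_VALUES, &isrow));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), ncols, cols, PETSC_COPY_VALUES, &iscol));
  PetscCall(MatCreateSubMatrix(A, isrow, iscol, MAT_INITIAL_MATRIX, &sub));
  /* ... use sub; later calls may pass MAT_REUSE_MATRIX to refill it ... */
  PetscCall(MatDestroy(&sub));
  PetscCall(ISDestroy(&isrow));
  PetscCall(ISDestroy(&iscol));
.ve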
8494: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8495: @*/
8496: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8497: {
8498: PetscMPIInt size;
8499: Mat *local;
8500: IS iscoltmp;
8501: PetscBool flg;
8503: PetscFunctionBegin;
8507: PetscAssertPointer(newmat, 5);
8510: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8511: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8513: MatCheckPreallocated(mat, 1);
8514: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8516: if (!iscol || isrow == iscol) {
8517: PetscBool stride;
8518: PetscMPIInt grabentirematrix = 0, grab;
8519: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8520: if (stride) {
8521: PetscInt first, step, n, rstart, rend;
8522: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8523: if (step == 1) {
8524: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8525: if (rstart == first) {
8526: PetscCall(ISGetLocalSize(isrow, &n));
8527: if (n == rend - rstart) grabentirematrix = 1;
8528: }
8529: }
8530: }
8531: PetscCallMPI(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8532: if (grab) {
8533: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8534: if (cll == MAT_INITIAL_MATRIX) {
8535: *newmat = mat;
8536: PetscCall(PetscObjectReference((PetscObject)mat));
8537: }
8538: PetscFunctionReturn(PETSC_SUCCESS);
8539: }
8540: }
8542: if (!iscol) {
8543: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8544: } else {
8545: iscoltmp = iscol;
8546: }
8548: /* if original matrix is on just one processor then use submatrix generated */
8549: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8550: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8551: goto setproperties;
8552: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8553: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8554: *newmat = *local;
8555: PetscCall(PetscFree(local));
8556: goto setproperties;
8557: } else if (!mat->ops->createsubmatrix) {
8558: /* Create a new matrix type that implements the operation using the full matrix */
8559: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8560: switch (cll) {
8561: case MAT_INITIAL_MATRIX:
8562: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8563: break;
8564: case MAT_REUSE_MATRIX:
8565: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8566: break;
8567: default:
8568: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8569: }
8570: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8571: goto setproperties;
8572: }
8574: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8575: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8576: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8578: setproperties:
8579: if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
8580: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8581: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8582: }
8583: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8584: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8585: if (!iscol || isrow == iscol) PetscCall(MatSelectVariableBlockSizes(*newmat, mat, isrow));
8586: PetscFunctionReturn(PETSC_SUCCESS);
8587: }
8589: /*@
8590: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8592: Not Collective
8594: Input Parameters:
8595: + A - the matrix we wish to propagate options from
8596: - B - the matrix we wish to propagate options to
8598: Level: beginner
8600: Note:
8601: Propagates the options associated to `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8603: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8604: @*/
8605: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8606: {
8607: PetscFunctionBegin;
8610: B->symmetry_eternal = A->symmetry_eternal;
8611: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8612: B->symmetric = A->symmetric;
8613: B->structurally_symmetric = A->structurally_symmetric;
8614: B->spd = A->spd;
8615: B->hermitian = A->hermitian;
8616: PetscFunctionReturn(PETSC_SUCCESS);
8617: }
8619: /*@
8620: MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8621: used during the assembly process to store values that belong to
8622: other processors.
8624: Not Collective
8626: Input Parameters:
8627: + mat - the matrix
8628: . size - the initial size of the stash.
8629: - bsize - the initial size of the block-stash (if used).
8631: Options Database Keys:
8632: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8633: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8635: Level: intermediate
8637: Notes:
8638: The block-stash is used for values set with `MatSetValuesBlocked()` while
8639: the stash is used for values set with `MatSetValues()`
8641: Run with the option -info and look for output of the form
8642: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8643: to determine the appropriate value, MM, to use for size and
8644: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8645: to determine the value, BMM, to use for `bsize`
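   For example, if the -info output reports that the stash needed roughly 10000 entries and the block-stash 1000, one could preallocate them with (the sizes below are illustrative)
.vb
  PetscCall(MatStashSetInitialSize(A, 10000, 1000));
.ve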
8647: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8648: @*/
8649: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8650: {
8651: PetscFunctionBegin;
8654: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8655: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8656: PetscFunctionReturn(PETSC_SUCCESS);
8657: }
8659: /*@
8660: MatInterpolateAdd - $w = y + A*x$ or $w = y + A^T*x$ depending on the shape of
8661: the matrix
8663: Neighbor-wise Collective
8665: Input Parameters:
8666: + A - the matrix
8667: . x - the vector to be multiplied by the interpolation operator
8668: - y - the vector to be added to the result
8670: Output Parameter:
8671: . w - the resulting vector
8673: Level: intermediate
8675: Notes:
8676: `w` may be the same vector as `y`.
8678: This allows one to use either the restriction or interpolation (its transpose)
8679: matrix to do the interpolation
8681: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8682: @*/
8683: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8684: {
8685: PetscInt M, N, Ny;
8687: PetscFunctionBegin;
8692: PetscCall(MatGetSize(A, &M, &N));
8693: PetscCall(VecGetSize(y, &Ny));
8694: if (M == Ny) {
8695: PetscCall(MatMultAdd(A, x, y, w));
8696: } else {
8697: PetscCall(MatMultTransposeAdd(A, x, y, w));
8698: }
8699: PetscFunctionReturn(PETSC_SUCCESS);
8700: }
8702: /*@
8703: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8704: the matrix
8706: Neighbor-wise Collective
8708: Input Parameters:
8709: + A - the matrix
8710: - x - the vector to be interpolated
8712: Output Parameter:
8713: . y - the resulting vector
8715: Level: intermediate
8717: Note:
8718: This allows one to use either the restriction or interpolation (its transpose)
8719: matrix to do the interpolation
8721: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8722: @*/
8723: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8724: {
8725: PetscInt M, N, Ny;
8727: PetscFunctionBegin;
8731: PetscCall(MatGetSize(A, &M, &N));
8732: PetscCall(VecGetSize(y, &Ny));
8733: if (M == Ny) {
8734: PetscCall(MatMult(A, x, y));
8735: } else {
8736: PetscCall(MatMultTranspose(A, x, y));
8737: }
8738: PetscFunctionReturn(PETSC_SUCCESS);
8739: }
8741: /*@
8742: MatRestrict - $y = A*x$ or $A^T*x$
8744: Neighbor-wise Collective
8746: Input Parameters:
8747: + A - the matrix
8748: - x - the vector to be restricted
8750: Output Parameter:
8751: . y - the resulting vector
8753: Level: intermediate
8755: Note:
8756: This allows one to use either the restriction or interpolation (its transpose)
8757: matrix to do the restriction
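
   A sketch of typical multigrid-style use, assuming `P` is the interpolation (prolongation) matrix from the coarse space to the fine space and the vectors have the corresponding sizes, is
.vb
  PetscCall(MatRestrict(P, rfine, rcoarse));    /* applies P^T, since rfine has the fine-space size */
  PetscCall(MatInterpolate(P, xcoarse, xfine)); /* applies P,   since xfine has the fine-space size */
.ve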
8759: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8760: @*/
8761: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8762: {
8763: PetscInt M, N, Nx;
8765: PetscFunctionBegin;
8769: PetscCall(MatGetSize(A, &M, &N));
8770: PetscCall(VecGetSize(x, &Nx));
8771: if (M == Nx) {
8772: PetscCall(MatMultTranspose(A, x, y));
8773: } else {
8774: PetscCall(MatMult(A, x, y));
8775: }
8776: PetscFunctionReturn(PETSC_SUCCESS);
8777: }
8779: /*@
8780: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8782: Neighbor-wise Collective
8784: Input Parameters:
8785: + A - the matrix
8786: . x - the input dense matrix to be multiplied
8787: - w - the input dense matrix to be added to the result
8789: Output Parameter:
8790: . y - the output dense matrix
8792: Level: intermediate
8794: Note:
8795: This allows one to use either the restriction or interpolation (its transpose)
8796: matrix to do the interpolation. The `y` matrix can be reused if it was already created with the proper sizes,
8797: otherwise it will be recreated. `y` must be initialized to `NULL` if the output matrix has not yet been created.
8799: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8800: @*/
8801: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8802: {
8803: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8804: PetscBool trans = PETSC_TRUE;
8805: MatReuse reuse = MAT_INITIAL_MATRIX;
8807: PetscFunctionBegin;
8813: PetscCall(MatGetSize(A, &M, &N));
8814: PetscCall(MatGetSize(x, &Mx, &Nx));
8815: if (N == Mx) trans = PETSC_FALSE;
8816: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8817: Mo = trans ? N : M;
8818: if (*y) {
8819: PetscCall(MatGetSize(*y, &My, &Ny));
8820: if (Mo == My && Nx == Ny) {
8821: reuse = MAT_REUSE_MATRIX;
8822: } else {
8823: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8824: PetscCall(MatDestroy(y));
8825: }
8826: }
8828: if (w && *y == w) { /* this is to minimize changes in PCMG */
8829: PetscBool flg;
8831: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8832: if (w) {
8833: PetscInt My, Ny, Mw, Nw;
8835: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8836: PetscCall(MatGetSize(*y, &My, &Ny));
8837: PetscCall(MatGetSize(w, &Mw, &Nw));
8838: if (!flg || My != Mw || Ny != Nw) w = NULL;
8839: }
8840: if (!w) {
8841: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8842: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8843: PetscCall(PetscObjectDereference((PetscObject)w));
8844: } else {
8845: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8846: }
8847: }
8848: if (!trans) {
8849: PetscCall(MatMatMult(A, x, reuse, PETSC_DETERMINE, y));
8850: } else {
8851: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DETERMINE, y));
8852: }
8853: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8854: PetscFunctionReturn(PETSC_SUCCESS);
8855: }
8857: /*@
8858: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8860: Neighbor-wise Collective
8862: Input Parameters:
8863: + A - the matrix
8864: - x - the input dense matrix
8866: Output Parameter:
8867: . y - the output dense matrix
8869: Level: intermediate
8871: Note:
8872: This allows one to use either the restriction or interpolation (its transpose)
8873: matrix to do the interpolation. The `y` matrix can be reused if it was already created with the proper sizes,
8874: otherwise it will be recreated. `y` must be initialized to `NULL` if the output matrix has not yet been created.
8876: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8877: @*/
8878: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8879: {
8880: PetscFunctionBegin;
8881: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8882: PetscFunctionReturn(PETSC_SUCCESS);
8883: }
8885: /*@
8886: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8888: Neighbor-wise Collective
8890: Input Parameters:
8891: + A - the matrix
8892: - x - the input dense matrix
8894: Output Parameter:
8895: . y - the output dense matrix
8897: Level: intermediate
8899: Note:
8900: This allows one to use either the restriction or interpolation (its transpose)
8901: matrix to do the restriction. The `y` matrix can be reused if it was already created with the proper sizes,
8902: otherwise it will be recreated. `y` must be initialized to `NULL` if the output matrix has not yet been created.
8904: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8905: @*/
8906: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
8907: {
8908: PetscFunctionBegin;
8909: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8910: PetscFunctionReturn(PETSC_SUCCESS);
8911: }
8913: /*@
8914: MatGetNullSpace - retrieves the null space of a matrix.
8916: Logically Collective
8918: Input Parameter:
8919: . mat - the matrix

 Output Parameter:
8920: . nullsp - the null space object, `NULL` if not set
8922: Level: developer
8924: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8925: @*/
8926: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8927: {
8928: PetscFunctionBegin;
8930: PetscAssertPointer(nullsp, 2);
8931: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8932: PetscFunctionReturn(PETSC_SUCCESS);
8933: }
8935: /*@C
8936: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
8938: Logically Collective
8940: Input Parameters:
8941: + n - the number of matrices
8942: - mat - the array of matrices
8944: Output Parameters:
8945: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space, length 3 * `n`
8947: Level: developer
8949: Note:
8950: Call `MatRestoreNullSpaces()` to provide these to another array of matrices
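
   A sketch of typical use, assuming `nmat` matrices stored in the array `mats`, is
.vb
  MatNullSpace *nullsp;

  PetscCall(MatGetNullSpaces(nmat, mats, &nullsp));
  /* ... e.g. recreate or convert the matrices in mats ... */
  PetscCall(MatRestoreNullSpaces(nmat, mats, &nullsp));
.ve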
8952: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8953: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
8954: @*/
8955: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8956: {
8957: PetscFunctionBegin;
8958: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8959: PetscAssertPointer(mat, 2);
8960: PetscAssertPointer(nullsp, 3);
8962: PetscCall(PetscCalloc1(3 * n, nullsp));
8963: for (PetscInt i = 0; i < n; i++) {
8965: (*nullsp)[i] = mat[i]->nullsp;
8966: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
8967: (*nullsp)[n + i] = mat[i]->nearnullsp;
8968: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
8969: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
8970: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
8971: }
8972: PetscFunctionReturn(PETSC_SUCCESS);
8973: }
8975: /*@C
8976: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
8978: Logically Collective
8980: Input Parameters:
8981: + n - the number of matrices
8982: . mat - the array of matrices
8983: - nullsp - an array of null spaces
8985: Level: developer
8987: Note:
8988: Call `MatGetNullSpaces()` to create `nullsp`
8990: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8991: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
8992: @*/
8993: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8994: {
8995: PetscFunctionBegin;
8996: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8997: PetscAssertPointer(mat, 2);
8998: PetscAssertPointer(nullsp, 3);
8999: PetscAssertPointer(*nullsp, 3);
9001: for (PetscInt i = 0; i < n; i++) {
9003: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
9004: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
9005: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
9006: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
9007: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
9008: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
9009: }
9010: PetscCall(PetscFree(*nullsp));
9011: PetscFunctionReturn(PETSC_SUCCESS);
9012: }
9014: /*@
9015: MatSetNullSpace - attaches a null space to a matrix.
9017: Logically Collective
9019: Input Parameters:
9020: + mat - the matrix
9021: - nullsp - the null space object
9023: Level: advanced
9025: Notes:
9026: This null space is used by the `KSP` linear solvers to solve singular systems.
9028: Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with a `nullsp` of `NULL`
9030: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
9031: to zero but the linear system will still be solved in a least squares sense.
9033: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
9034: for a matrix $A$ mapping $R^n$ to $R^m$ ($m$ rows, $n$ columns), $R^n$ is the direct sum of the null space of $A$, $n(A)$, and the range of $A^T$, $R(A^T)$.
9035: Similarly, $R^m$ is the direct sum of $n(A^T)$ and $R(A)$. Hence the linear system $A x = b$ has a solution only if $b$ is in $R(A)$ (or, equivalently, $b$ is orthogonal to
9036: $n(A^T)$), and if $x$ is a solution then $x + \alpha n(A)$ is a solution for any $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution
9037: the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$ where $\hat{b}$ is $b$ orthogonalized to the $n(A^T)$.
9038: This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
9040: If the matrix is known to be symmetric because it is an `MATSBAIJ` matrix or one has called
9041: `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`); this
9042: routine also automatically calls `MatSetTransposeNullSpace()`.
9044: The user should call `MatNullSpaceDestroy()`.
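
   For example, a matrix whose null space consists of the constant vector (as arises for a pure Neumann Laplacian) could be handled as follows; this is a sketch and assumes `A` already exists.
.vb
  MatNullSpace nullsp;

  PetscCall(MatNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_TRUE, 0, NULL, &nullsp));
  PetscCall(MatSetNullSpace(A, nullsp));
  PetscCall(MatNullSpaceDestroy(&nullsp));
.ve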
9046: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9047: `KSPSetPCSide()`
9048: @*/
9049: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
9050: {
9051: PetscFunctionBegin;
9054: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9055: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
9056: mat->nullsp = nullsp;
9057: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
9058: PetscFunctionReturn(PETSC_SUCCESS);
9059: }
9061: /*@
9062: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
9064: Logically Collective
9066: Input Parameter:
9067: . mat - the matrix

 Output Parameter:
9068: . nullsp - the null space object of the transpose of the matrix, `NULL` if not set
9070: Level: developer
9072: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9073: @*/
9074: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
9075: {
9076: PetscFunctionBegin;
9079: PetscAssertPointer(nullsp, 2);
9080: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
9081: PetscFunctionReturn(PETSC_SUCCESS);
9082: }
9084: /*@
9085: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
9087: Logically Collective
9089: Input Parameters:
9090: + mat - the matrix
9091: - nullsp - the null space object
9093: Level: advanced
9095: Notes:
9096: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9098: See `MatSetNullSpace()`
9100: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9101: @*/
9102: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
9103: {
9104: PetscFunctionBegin;
9107: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9108: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
9109: mat->transnullsp = nullsp;
9110: PetscFunctionReturn(PETSC_SUCCESS);
9111: }
9113: /*@
9114: MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions.
9115: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9117: Logically Collective
9119: Input Parameters:
9120: + mat - the matrix
9121: - nullsp - the null space object
9123: Level: advanced
9125: Notes:
9126: Overwrites any previous near null space that may have been attached
9128: You can remove the null space by calling this routine with a `nullsp` of `NULL`
9130: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9131: @*/
9132: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
9133: {
9134: PetscFunctionBegin;
9138: MatCheckPreallocated(mat, 1);
9139: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9140: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
9141: mat->nearnullsp = nullsp;
9142: PetscFunctionReturn(PETSC_SUCCESS);
9143: }
9145: /*@
9146: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9148: Not Collective
9150: Input Parameter:
9151: . mat - the matrix
9153: Output Parameter:
9154: . nullsp - the null space object, `NULL` if not set
9156: Level: advanced
9158: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9159: @*/
9160: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
9161: {
9162: PetscFunctionBegin;
9165: PetscAssertPointer(nullsp, 2);
9166: MatCheckPreallocated(mat, 1);
9167: *nullsp = mat->nearnullsp;
9168: PetscFunctionReturn(PETSC_SUCCESS);
9169: }
9171: /*@
9172: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9174: Collective
9176: Input Parameters:
9177: + mat - the matrix
9178: . row - row/column permutation
9179: - info - information on desired factorization process
9181: Level: developer
9183: Notes:
9184: Probably really in-place only when level of fill is zero, otherwise allocates
9185: new space to store factored matrix and deletes previous memory.
9187: Most users should employ the `KSP` interface for linear solvers
9188: instead of working directly with matrix algebra routines such as this.
9189: See, e.g., `KSPCreate()`.
9191: Fortran Note:
9192: A valid (non-null) `info` argument must be provided
9194: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9195: @*/
9196: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9197: {
9198: PetscFunctionBegin;
9202: PetscAssertPointer(info, 3);
9203: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9204: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9205: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9206: MatCheckPreallocated(mat, 1);
9207: PetscUseTypeMethod(mat, iccfactor, row, info);
9208: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9209: PetscFunctionReturn(PETSC_SUCCESS);
9210: }
9212: /*@
9213: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9214: ghosted ones.
9216: Not Collective
9218: Input Parameters:
9219: + mat - the matrix
9220: - diag - the diagonal values, including ghost ones
9222: Level: developer
9224: Notes:
9225: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9227: This allows one to avoid the communication that would otherwise be needed to perform the scaling with `MatDiagonalScale()`
9229: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9230: @*/
9231: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9232: {
9233: PetscMPIInt size;
9235: PetscFunctionBegin;
9240: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9241: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9242: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9243: if (size == 1) {
9244: PetscInt n, m;
9245: PetscCall(VecGetSize(diag, &n));
9246: PetscCall(MatGetSize(mat, NULL, &m));
9247: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9248: PetscCall(MatDiagonalScale(mat, NULL, diag));
9249: } else {
9250: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9251: }
9252: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9253: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9254: PetscFunctionReturn(PETSC_SUCCESS);
9255: }
9257: /*@
9258: MatGetInertia - Gets the inertia from a factored matrix
9260: Collective
9262: Input Parameter:
9263: . mat - the matrix
9265: Output Parameters:
9266: + nneg - number of negative eigenvalues
9267: . nzero - number of zero eigenvalues
9268: - npos - number of positive eigenvalues
9270: Level: advanced
9272: Note:
9273: Matrix must have been factored by `MatCholeskyFactor()`
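
   A sketch of typical use, assuming `F` is a matrix that has already been Cholesky factored with a solver that supports inertia computation, is
.vb
  PetscInt nneg, nzero, npos;

  PetscCall(MatGetInertia(F, &nneg, &nzero, &npos));
.ve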
9275: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9276: @*/
9277: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9278: {
9279: PetscFunctionBegin;
9282: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9283: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9284: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9285: PetscFunctionReturn(PETSC_SUCCESS);
9286: }
9288: /*@C
9289: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9291: Neighbor-wise Collective
9293: Input Parameters:
9294: + mat - the factored matrix obtained with `MatGetFactor()`
9295: - b - the right-hand-side vectors
9297: Output Parameter:
9298: . x - the result vectors
9300: Level: developer
9302: Note:
9303: The vectors `b` and `x` cannot be the same. I.e., one cannot
9304: call `MatSolves`(A,x,x).
9306: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9307: @*/
9308: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9309: {
9310: PetscFunctionBegin;
9313: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9314: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9315: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9317: MatCheckPreallocated(mat, 1);
9318: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9319: PetscUseTypeMethod(mat, solves, b, x);
9320: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9321: PetscFunctionReturn(PETSC_SUCCESS);
9322: }
9324: /*@
9325: MatIsSymmetric - Test whether a matrix is symmetric
9327: Collective
9329: Input Parameters:
9330: + A - the matrix to test
9331: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9333: Output Parameter:
9334: . flg - the result
9336: Level: intermediate
9338: Notes:
9339: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9341: If the matrix does not yet know whether it is symmetric, this can be an expensive operation; see also `MatIsSymmetricKnown()`
9343: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9344: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9346: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9347: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9348: @*/
9349: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9350: {
9351: PetscFunctionBegin;
9353: PetscAssertPointer(flg, 3);
9354: if (A->symmetric != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->symmetric);
9355: else {
9356: if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9357: else PetscCall(MatIsTranspose(A, A, tol, flg));
9358: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9359: }
9360: PetscFunctionReturn(PETSC_SUCCESS);
9361: }
9363: /*@
9364: MatIsHermitian - Test whether a matrix is Hermitian
9366: Collective
9368: Input Parameters:
9369: + A - the matrix to test
9370: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9372: Output Parameter:
9373: . flg - the result
9375: Level: intermediate
9377: Notes:
9378: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9380: If the matrix does not yet know whether it is Hermitian, this can be an expensive operation; see also `MatIsHermitianKnown()`
9382: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9383: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9385: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9386: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9387: @*/
9388: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9389: {
9390: PetscFunctionBegin;
9392: PetscAssertPointer(flg, 3);
9393: if (A->hermitian != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->hermitian);
9394: else {
9395: if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9396: else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9397: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9398: }
9399: PetscFunctionReturn(PETSC_SUCCESS);
9400: }
9402: /*@
9403: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9405: Not Collective
9407: Input Parameter:
9408: . A - the matrix to check
9410: Output Parameters:
9411: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9412: - flg - the result (only valid if set is `PETSC_TRUE`)
9414: Level: advanced
9416: Notes:
9417: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9418: if you want it explicitly checked
9420: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9421: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
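
   A sketch of typical use is
.vb
  PetscBool set, flg;

  PetscCall(MatIsSymmetricKnown(A, &set, &flg));
  if (set && flg) {
    /* the matrix is known to be symmetric; a cheaper code path may be selected */
  }
.ve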
9423: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9424: @*/
9425: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9426: {
9427: PetscFunctionBegin;
9429: PetscAssertPointer(set, 2);
9430: PetscAssertPointer(flg, 3);
9431: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9432: *set = PETSC_TRUE;
9433: *flg = PetscBool3ToBool(A->symmetric);
9434: } else {
9435: *set = PETSC_FALSE;
9436: }
9437: PetscFunctionReturn(PETSC_SUCCESS);
9438: }
9440: /*@
9441: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9443: Not Collective
9445: Input Parameter:
9446: . A - the matrix to check
9448: Output Parameters:
9449: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9450: - flg - the result (only valid if set is `PETSC_TRUE`)
9452: Level: advanced
9454: Notes:
9455: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9457: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9458: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9460: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9461: @*/
9462: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9463: {
9464: PetscFunctionBegin;
9466: PetscAssertPointer(set, 2);
9467: PetscAssertPointer(flg, 3);
9468: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9469: *set = PETSC_TRUE;
9470: *flg = PetscBool3ToBool(A->spd);
9471: } else {
9472: *set = PETSC_FALSE;
9473: }
9474: PetscFunctionReturn(PETSC_SUCCESS);
9475: }
9477: /*@
9478: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9480: Not Collective
9482: Input Parameter:
9483: . A - the matrix to check
9485: Output Parameters:
9486: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9487: - flg - the result (only valid if set is `PETSC_TRUE`)
9489: Level: advanced
9491: Notes:
9492: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9493: if you want it explicitly checked
9495: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9496: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9498: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9499: @*/
9500: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9501: {
9502: PetscFunctionBegin;
9504: PetscAssertPointer(set, 2);
9505: PetscAssertPointer(flg, 3);
9506: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9507: *set = PETSC_TRUE;
9508: *flg = PetscBool3ToBool(A->hermitian);
9509: } else {
9510: *set = PETSC_FALSE;
9511: }
9512: PetscFunctionReturn(PETSC_SUCCESS);
9513: }
9515: /*@
9516: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9518: Collective
9520: Input Parameter:
9521: . A - the matrix to test
9523: Output Parameter:
9524: . flg - the result
9526: Level: intermediate
9528: Notes:
9529: If the matrix does not yet know whether it is structurally symmetric, this can be an expensive operation; see also `MatIsStructurallySymmetricKnown()`
9531: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9532: symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9534: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9535: @*/
9536: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9537: {
9538: PetscFunctionBegin;
9540: PetscAssertPointer(flg, 2);
9541: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9542: *flg = PetscBool3ToBool(A->structurally_symmetric);
9543: } else {
9544: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9545: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9546: }
9547: PetscFunctionReturn(PETSC_SUCCESS);
9548: }
9550: /*@
9551: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9553: Not Collective
9555: Input Parameter:
9556: . A - the matrix to check
9558: Output Parameters:
9559: + set - `PETSC_TRUE` if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9560: - flg - the result (only valid if set is `PETSC_TRUE`)
9562: Level: advanced
9564: Notes:
9565: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9566: symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9568: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9570: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9571: @*/
9572: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9573: {
9574: PetscFunctionBegin;
9576: PetscAssertPointer(set, 2);
9577: PetscAssertPointer(flg, 3);
9578: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9579: *set = PETSC_TRUE;
9580: *flg = PetscBool3ToBool(A->structurally_symmetric);
9581: } else {
9582: *set = PETSC_FALSE;
9583: }
9584: PetscFunctionReturn(PETSC_SUCCESS);
9585: }
9587: /*@
9588: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9589: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9591: Not Collective
9593: Input Parameter:
9594: . mat - the matrix
9596: Output Parameters:
9597: + nstash - the size of the stash
9598: . reallocs - the number of additional mallocs incurred
9599: . bnstash - the size of the block stash
9600: - breallocs - the number of additional mallocs incurred in the block stash
9602: Level: advanced
9604: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9605: @*/
9606: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9607: {
9608: PetscFunctionBegin;
9609: PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9610: PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9611: PetscFunctionReturn(PETSC_SUCCESS);
9612: }
9614: /*@
9615: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9616: parallel layout, `PetscLayout` for rows and columns
9618: Collective
9620: Input Parameter:
9621: . mat - the matrix
9623: Output Parameters:
9624: + right - (optional) vector that the matrix can be multiplied against
9625: - left - (optional) vector that the matrix vector product can be stored in
9627: Level: advanced
9629: Notes:
9630: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9632: These are new vectors which are not owned by the mat; they should be destroyed with `VecDestroy()` when no longer needed
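
   A common use is to obtain work vectors for a matrix-vector product, for example
.vb
  Vec x, b;

  PetscCall(MatCreateVecs(A, &x, &b)); /* x is compatible with A*x, b can hold the result */
  PetscCall(MatMult(A, x, b));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
.ve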
9634: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9635: @*/
9636: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9637: {
9638: PetscFunctionBegin;
9641: if (mat->ops->getvecs) {
9642: PetscUseTypeMethod(mat, getvecs, right, left);
9643: } else {
9644: if (right) {
9645: PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9646: PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9647: PetscCall(VecSetType(*right, mat->defaultvectype));
9648: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9649: if (mat->boundtocpu && mat->bindingpropagates) {
9650: PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9651: PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9652: }
9653: #endif
9654: }
9655: if (left) {
9656: PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9657: PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9658: PetscCall(VecSetType(*left, mat->defaultvectype));
9659: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9660: if (mat->boundtocpu && mat->bindingpropagates) {
9661: PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9662: PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9663: }
9664: #endif
9665: }
9666: }
9667: PetscFunctionReturn(PETSC_SUCCESS);
9668: }
9670: /*@
9671: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9672: with default values.
9674: Not Collective
9676: Input Parameter:
9677: . info - the `MatFactorInfo` data structure
9679: Level: developer
9681: Notes:
9682: The solvers are generally used through the `KSP` and `PC` objects, for example
9683: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9685: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
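
   For example (the value assigned to the fill entry below is illustrative),
.vb
  MatFactorInfo info;

  PetscCall(MatFactorInfoInitialize(&info));
  info.fill = 2.0; /* expected fill ratio for the factorization */
.ve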
9687: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9688: @*/
9689: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9690: {
9691: PetscFunctionBegin;
9692: PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9693: PetscFunctionReturn(PETSC_SUCCESS);
9694: }
9696: /*@
9697: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9699: Collective
9701: Input Parameters:
9702: + mat - the factored matrix
9703: - is - the index set defining the Schur indices (0-based)
9705: Level: advanced
9707: Notes:
9708: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9710: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9712: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
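
   A sketch of the workflow with `MATSOLVERMUMPS` (the index set `is` defining the Schur rows, and the vectors `rhsS` and `solS` of the Schur complement size, are assumed to exist) is
.vb
  Mat F;

  PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F));
  PetscCall(MatFactorSetSchurIS(F, is));
  /* perform the symbolic and numeric factorization of A as usual, for example
     with MatLUFactorSymbolic() and MatLUFactorNumeric(), or through PCFactor */
  PetscCall(MatFactorSolveSchurComplement(F, rhsS, solS));
.ve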
9714: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9715: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9716: @*/
9717: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9718: {
9719: PetscErrorCode (*f)(Mat, IS);
9721: PetscFunctionBegin;
9726: PetscCheckSameComm(mat, 1, is, 2);
9727: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9728: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9729: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9730: PetscCall(MatDestroy(&mat->schur));
9731: PetscCall((*f)(mat, is));
9732: PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9733: PetscFunctionReturn(PETSC_SUCCESS);
9734: }
9736: /*@
9737: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9739: Logically Collective
9741: Input Parameter:
9742: . F - the factored matrix obtained by calling `MatGetFactor()`

 Output Parameters:
9743: + S - location where to return the Schur complement, can be `NULL`
9744: - status - the status of the Schur complement matrix, can be `NULL`
9746: Level: advanced
9748: Notes:
9749: You must call `MatFactorSetSchurIS()` before calling this routine.
9751: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9753: The routine provides a copy of the Schur matrix stored within the solver data structures.
9754: The caller must destroy the object when it is no longer needed.
9755: If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.
9757: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9759: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9761: Developer Note:
9762: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9763: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9765: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9766: @*/
9767: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9768: {
9769: PetscFunctionBegin;
9771: if (S) PetscAssertPointer(S, 2);
9772: if (status) PetscAssertPointer(status, 3);
9773: if (S) {
9774: PetscErrorCode (*f)(Mat, Mat *);
9776: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9777: if (f) {
9778: PetscCall((*f)(F, S));
9779: } else {
9780: PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9781: }
9782: }
9783: if (status) *status = F->schur_status;
9784: PetscFunctionReturn(PETSC_SUCCESS);
9785: }
9787: /*@
9788: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9790: Logically Collective
9792: Input Parameter:
9793: . F - the factored matrix obtained by calling `MatGetFactor()`

 Output Parameters:
9794: + S - location where to return the Schur complement, can be `NULL`
9795: - status - the status of the Schur complement matrix, can be `NULL`
9797: Level: advanced
9799: Notes:
9800: You must call `MatFactorSetSchurIS()` before calling this routine.
9802: Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`
9804: The routine returns the Schur complement stored within the data structures of the solver.
9806: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9808: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9810: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9812: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9814: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9815: @*/
9816: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9817: {
9818: PetscFunctionBegin;
9820: if (S) {
9821: PetscAssertPointer(S, 2);
9822: *S = F->schur;
9823: }
9824: if (status) {
9825: PetscAssertPointer(status, 3);
9826: *status = F->schur_status;
9827: }
9828: PetscFunctionReturn(PETSC_SUCCESS);
9829: }
9831: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9832: {
9833: Mat S = F->schur;
9835: PetscFunctionBegin;
9836: switch (F->schur_status) {
9837: case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9838: case MAT_FACTOR_SCHUR_INVERTED:
9839: if (S) {
9840: S->ops->solve = NULL;
9841: S->ops->matsolve = NULL;
9842: S->ops->solvetranspose = NULL;
9843: S->ops->matsolvetranspose = NULL;
9844: S->ops->solveadd = NULL;
9845: S->ops->solvetransposeadd = NULL;
9846: S->factortype = MAT_FACTOR_NONE;
9847: PetscCall(PetscFree(S->solvertype));
9848: }
9849: case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9850: break;
9851: default:
9852: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9853: }
9854: PetscFunctionReturn(PETSC_SUCCESS);
9855: }
9857: /*@
9858: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9860: Logically Collective
9862: Input Parameters:
9863: + F - the factored matrix obtained by calling `MatGetFactor()`
9864: . S - location where the Schur complement is stored
9865: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9867: Level: advanced
9869: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9870: @*/
9871: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9872: {
9873: PetscFunctionBegin;
9875: if (S) {
9877: *S = NULL;
9878: }
9879: F->schur_status = status;
9880: PetscCall(MatFactorUpdateSchurStatus_Private(F));
9881: PetscFunctionReturn(PETSC_SUCCESS);
9882: }
9884: /*@
9885: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9887: Logically Collective
9889: Input Parameters:
9890: + F - the factored matrix obtained by calling `MatGetFactor()`
9891: . rhs - location where the right-hand side of the Schur complement system is stored
9892: - sol - location where the solution of the Schur complement system has to be returned
9894: Level: advanced
9896: Notes:
9897: The sizes of the vectors should match the size of the Schur complement
9899: Must be called after `MatFactorSetSchurIS()`
9901: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9902: @*/
9903: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9904: {
9905: PetscFunctionBegin;
9912: PetscCheckSameComm(F, 1, rhs, 2);
9913: PetscCheckSameComm(F, 1, sol, 3);
9914: PetscCall(MatFactorFactorizeSchurComplement(F));
9915: switch (F->schur_status) {
9916: case MAT_FACTOR_SCHUR_FACTORED:
9917: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
9918: break;
9919: case MAT_FACTOR_SCHUR_INVERTED:
9920: PetscCall(MatMultTranspose(F->schur, rhs, sol));
9921: break;
9922: default:
9923: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9924: }
9925: PetscFunctionReturn(PETSC_SUCCESS);
9926: }
9928: /*@
9929: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9931: Logically Collective
9933: Input Parameters:
9934: + F - the factored matrix obtained by calling `MatGetFactor()`
9935: . rhs - location where the right-hand side of the Schur complement system is stored
9936: - sol - location where the solution of the Schur complement system has to be returned
9938: Level: advanced
9940: Notes:
9941: The sizes of the vectors should match the size of the Schur complement
9943: Must be called after `MatFactorSetSchurIS()`
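  Example Usage:
  A minimal sketch, assuming `F` has already been factored after a call to `MatFactorSetSchurIS()`; the vectors are created here from the stored
  Schur complement so that their sizes match:
.vb
  Mat                  S;
  MatFactorSchurStatus status;
  Vec                  rhs, sol;

  PetscCall(MatFactorGetSchurComplement(F, &S, &status));
  PetscCall(MatCreateVecs(S, &sol, &rhs));
  PetscCall(MatFactorRestoreSchurComplement(F, &S, status));
  // ... fill rhs with the Schur complement right-hand side ...
  PetscCall(MatFactorSolveSchurComplement(F, rhs, sol)); // solves S * sol = rhs
  PetscCall(VecDestroy(&rhs));
  PetscCall(VecDestroy(&sol));
.ve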
9945: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
9946: @*/
9947: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
9948: {
9949: PetscFunctionBegin;
9956: PetscCheckSameComm(F, 1, rhs, 2);
9957: PetscCheckSameComm(F, 1, sol, 3);
9958: PetscCall(MatFactorFactorizeSchurComplement(F));
9959: switch (F->schur_status) {
9960: case MAT_FACTOR_SCHUR_FACTORED:
9961: PetscCall(MatSolve(F->schur, rhs, sol));
9962: break;
9963: case MAT_FACTOR_SCHUR_INVERTED:
9964: PetscCall(MatMult(F->schur, rhs, sol));
9965: break;
9966: default:
9967: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9968: }
9969: PetscFunctionReturn(PETSC_SUCCESS);
9970: }
9972: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
9973: #if PetscDefined(HAVE_CUDA)
9974: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
9975: #endif
9977: /* Schur status updated in the interface */
9978: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
9979: {
9980: Mat S = F->schur;
9982: PetscFunctionBegin;
9983: if (S) {
9984: PetscMPIInt size;
9985: PetscBool isdense, isdensecuda;
9987: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
9988: PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
9989: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
9990: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
9991: PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
9992: PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
9993: if (isdense) {
9994: PetscCall(MatSeqDenseInvertFactors_Private(S));
9995: } else if (isdensecuda) {
9996: #if defined(PETSC_HAVE_CUDA)
9997: PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
9998: #endif
9999: }
10000: // TODO: add a HIP dense path here, analogous to the CUDA case above
10001: PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
10002: }
10003: PetscFunctionReturn(PETSC_SUCCESS);
10004: }
10006: /*@
10007: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
10009: Logically Collective
10011: Input Parameter:
10012: . F - the factored matrix obtained by calling `MatGetFactor()`
10014: Level: advanced
10016: Notes:
10017: Must be called after `MatFactorSetSchurIS()`.
10019: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
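  Example Usage:
  A minimal sketch, assuming `F` has already been factored after a call to `MatFactorSetSchurIS()`:
.vb
  Mat                  Sinv;
  MatFactorSchurStatus status;

  PetscCall(MatFactorInvertSchurComplement(F));
  PetscCall(MatFactorCreateSchurComplement(F, &Sinv, &status)); // status is MAT_FACTOR_SCHUR_INVERTED; Sinv holds the explicit inverse
  PetscCall(MatDestroy(&Sinv));
.ve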
10021: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
10022: @*/
10023: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
10024: {
10025: PetscFunctionBegin;
10028: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
10029: PetscCall(MatFactorFactorizeSchurComplement(F));
10030: PetscCall(MatFactorInvertSchurComplement_Private(F));
10031: F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
10032: PetscFunctionReturn(PETSC_SUCCESS);
10033: }
10035: /*@
10036: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
10038: Logically Collective
10040: Input Parameter:
10041: . F - the factored matrix obtained by calling `MatGetFactor()`
10043: Level: advanced
10045: Note:
10046: Must be called after `MatFactorSetSchurIS()`
10048: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
10049: @*/
10050: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
10051: {
10052: MatFactorInfo info;
10054: PetscFunctionBegin;
10057: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
10058: PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
10059: PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
10060: if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
10061: PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
10062: } else {
10063: PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
10064: }
10065: PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
10066: F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
10067: PetscFunctionReturn(PETSC_SUCCESS);
10068: }
10070: /*@
10071: MatPtAP - Creates the matrix product $C = P^T * A * P$
10073: Neighbor-wise Collective
10075: Input Parameters:
10076: + A - the matrix
10077: . P - the projection matrix
10078: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10079: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10080: if the result is a dense matrix this is irrelevant
10082: Output Parameter:
10083: . C - the product matrix
10085: Level: intermediate
10087: Notes:
10088: C will be created and must be destroyed by the user with `MatDestroy()`.
10090: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10092: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
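  Example Usage:
  A minimal sketch of the create-then-reuse pattern, assuming `A` and `P` are assembled matrices with compatible sizes (the names are illustrative only):
.vb
  Mat C;

  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); // C = P^T * A * P
  // ... change the numerical values of A, keeping the same nonzero pattern ...
  PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, PETSC_CURRENT, &C));     // recompute into the existing C
  PetscCall(MatDestroy(&C));
.ve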
10094: Developer Note:
10095: For matrix types without a special implementation, the function falls back to `MatMatMult()` followed by `MatTransposeMatMult()`.
10097: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10098: @*/
10099: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
10100: {
10101: PetscFunctionBegin;
10102: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10103: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10105: if (scall == MAT_INITIAL_MATRIX) {
10106: PetscCall(MatProductCreate(A, P, NULL, C));
10107: PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
10108: PetscCall(MatProductSetAlgorithm(*C, "default"));
10109: PetscCall(MatProductSetFill(*C, fill));
10111: (*C)->product->api_user = PETSC_TRUE;
10112: PetscCall(MatProductSetFromOptions(*C));
10113: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
10114: PetscCall(MatProductSymbolic(*C));
10115: } else { /* scall == MAT_REUSE_MATRIX */
10116: PetscCall(MatProductReplaceMats(A, P, NULL, *C));
10117: }
10119: PetscCall(MatProductNumeric(*C));
10120: (*C)->symmetric = A->symmetric;
10121: (*C)->spd = A->spd;
10122: PetscFunctionReturn(PETSC_SUCCESS);
10123: }
10125: /*@
10126: MatRARt - Creates the matrix product $C = R * A * R^T$
10128: Neighbor-wise Collective
10130: Input Parameters:
10131: + A - the matrix
10132: . R - the projection matrix
10133: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10134: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10135: if the result is a dense matrix this is irrelevant
10137: Output Parameter:
10138: . C - the product matrix
10140: Level: intermediate
10142: Notes:
10143: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10145: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10147: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10148: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10149: the parallel `MatRARt()` is implemented by computing the explicit transpose of `R`, which can be very expensive.
10150: We recommend using `MatPtAP()` when possible.
10152: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
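  Example Usage:
  A minimal sketch, assuming `A` and `R` are assembled `MATAIJ` matrices with compatible sizes (the names are illustrative only):
.vb
  Mat C;

  PetscCall(MatRARt(A, R, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); // C = R * A * R^T
  PetscCall(MatDestroy(&C));
.ve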
10154: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10155: @*/
10156: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10157: {
10158: PetscFunctionBegin;
10159: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10160: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10162: if (scall == MAT_INITIAL_MATRIX) {
10163: PetscCall(MatProductCreate(A, R, NULL, C));
10164: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10165: PetscCall(MatProductSetAlgorithm(*C, "default"));
10166: PetscCall(MatProductSetFill(*C, fill));
10168: (*C)->product->api_user = PETSC_TRUE;
10169: PetscCall(MatProductSetFromOptions(*C));
10170: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10171: PetscCall(MatProductSymbolic(*C));
10172: } else { /* scall == MAT_REUSE_MATRIX */
10173: PetscCall(MatProductReplaceMats(A, R, NULL, *C));
10174: }
10176: PetscCall(MatProductNumeric(*C));
10177: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10178: PetscFunctionReturn(PETSC_SUCCESS);
10179: }
10181: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10182: {
10183: PetscBool flg = PETSC_TRUE;
10185: PetscFunctionBegin;
10186: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10187: if (scall == MAT_INITIAL_MATRIX) {
10188: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10189: PetscCall(MatProductCreate(A, B, NULL, C));
10190: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10191: PetscCall(MatProductSetFill(*C, fill));
10192: } else { /* scall == MAT_REUSE_MATRIX */
10193: Mat_Product *product = (*C)->product;
10195: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
10196: if (flg && product && product->type != ptype) {
10197: PetscCall(MatProductClear(*C));
10198: product = NULL;
10199: }
10200: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10201: if (!product) { /* user provide the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10202: PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10203: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10204: product = (*C)->product;
10205: product->fill = fill;
10206: product->clear = PETSC_TRUE;
10207: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10208: flg = PETSC_FALSE;
10209: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10210: }
10211: }
10212: if (flg) {
10213: (*C)->product->api_user = PETSC_TRUE;
10214: PetscCall(MatProductSetType(*C, ptype));
10215: PetscCall(MatProductSetFromOptions(*C));
10216: PetscCall(MatProductSymbolic(*C));
10217: }
10218: PetscCall(MatProductNumeric(*C));
10219: PetscFunctionReturn(PETSC_SUCCESS);
10220: }
10222: /*@
10223: MatMatMult - Performs the matrix-matrix multiplication $C = A*B$.
10225: Neighbor-wise Collective
10227: Input Parameters:
10228: + A - the left matrix
10229: . B - the right matrix
10230: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10231: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10232: if the result is a dense matrix this is irrelevant
10234: Output Parameter:
10235: . C - the product matrix
10237: Notes:
10238: Unless `scall` is `MAT_REUSE_MATRIX`, `C` will be created.
10240: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10241: call to this function with `MAT_INITIAL_MATRIX`.
10243: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value actually needed.
10245: In the special case where matrix `B` (and hence `C`) are dense you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10246: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10248: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10250: Example of Usage:
10251: .vb
10252: MatProductCreate(A,B,NULL,&C);
10253: MatProductSetType(C,MATPRODUCT_AB);
10254: MatProductSymbolic(C);
10255: MatProductNumeric(C); // compute C=A * B
10256: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10257: MatProductNumeric(C);
10258: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10259: MatProductNumeric(C);
10260: .ve
10262: Level: intermediate
10264: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10265: @*/
10266: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10267: {
10268: PetscFunctionBegin;
10269: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10270: PetscFunctionReturn(PETSC_SUCCESS);
10271: }
10273: /*@
10274: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10276: Neighbor-wise Collective
10278: Input Parameters:
10279: + A - the left matrix
10280: . B - the right matrix
10281: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10282: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10284: Output Parameter:
10285: . C - the product matrix
10287: Options Database Key:
10288: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10289: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10290: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10292: Level: intermediate
10294: Notes:
10295: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10297: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10299: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10300: actually needed.
10302: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10303: and for pairs of `MATMPIDENSE` matrices.
10305: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`
10307: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10309: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10310: @*/
10311: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10312: {
10313: PetscFunctionBegin;
10314: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10315: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10316: PetscFunctionReturn(PETSC_SUCCESS);
10317: }
10319: /*@
10320: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10322: Neighbor-wise Collective
10324: Input Parameters:
10325: + A - the left matrix
10326: . B - the right matrix
10327: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10328: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10330: Output Parameter:
10331: . C - the product matrix
10333: Level: intermediate
10335: Notes:
10336: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10338: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
10340: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`
10342: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10343: actually needed.
10345: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10346: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10348: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
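  Example Usage:
  A minimal sketch forming a normal-equations type matrix, assuming `A` is an assembled `MATAIJ` matrix (the name is illustrative only):
.vb
  Mat N;

  PetscCall(MatTransposeMatMult(A, A, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &N)); // N = A^T * A
  PetscCall(MatDestroy(&N));
.ve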
10350: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10351: @*/
10352: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10353: {
10354: PetscFunctionBegin;
10355: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10356: PetscFunctionReturn(PETSC_SUCCESS);
10357: }
10359: /*@
10360: MatMatMatMult - Performs the matrix-matrix-matrix multiplication $D = A*B*C$.
10362: Neighbor-wise Collective
10364: Input Parameters:
10365: + A - the left matrix
10366: . B - the middle matrix
10367: . C - the right matrix
10368: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10369: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10370: if the result is a dense matrix this is irrelevant
10372: Output Parameter:
10373: . D - the product matrix
10375: Level: intermediate
10377: Notes:
10378: Unless `scall` is `MAT_REUSE_MATRIX` `D` will be created.
10380: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10382: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`
10384: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10385: actually needed.
10387: If you have many matrices with the same non-zero structure to multiply, you
10388: should use `MAT_REUSE_MATRIX` in all calls but the first
10390: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
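  Example Usage:
  A minimal sketch of the create-then-reuse pattern, assuming `A`, `B`, and `C` are assembled matrices with compatible sizes (the names are illustrative only):
.vb
  Mat D;

  PetscCall(MatMatMatMult(A, B, C, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &D)); // D = A * B * C
  // ... change the numerical values of A, B, or C, keeping the same nonzero patterns ...
  PetscCall(MatMatMatMult(A, B, C, MAT_REUSE_MATRIX, PETSC_CURRENT, &D));
  PetscCall(MatDestroy(&D));
.ve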
10392: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10393: @*/
10394: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10395: {
10396: PetscFunctionBegin;
10397: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10398: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10400: if (scall == MAT_INITIAL_MATRIX) {
10401: PetscCall(MatProductCreate(A, B, C, D));
10402: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10403: PetscCall(MatProductSetAlgorithm(*D, "default"));
10404: PetscCall(MatProductSetFill(*D, fill));
10406: (*D)->product->api_user = PETSC_TRUE;
10407: PetscCall(MatProductSetFromOptions(*D));
10408: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10409: ((PetscObject)C)->type_name);
10410: PetscCall(MatProductSymbolic(*D));
10411: } else { /* user may change input matrices when REUSE */
10412: PetscCall(MatProductReplaceMats(A, B, C, *D));
10413: }
10414: PetscCall(MatProductNumeric(*D));
10415: PetscFunctionReturn(PETSC_SUCCESS);
10416: }
10418: /*@
10419: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10421: Collective
10423: Input Parameters:
10424: + mat - the matrix
10425: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10426: . subcomm - MPI communicator split from the communicator in which `mat` resides (or `MPI_COMM_NULL` if `nsubcomm` is used)
10427: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10429: Output Parameter:
10430: . matredundant - redundant matrix
10432: Level: advanced
10434: Notes:
10435: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10436: original matrix has not changed since the last call to `MatCreateRedundantMatrix()`.
10438: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10439: calling it.
10441: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
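  Example Usage:
  A minimal sketch, assuming `mat` is an assembled parallel matrix; the number of subcommunicators used here is illustrative only:
.vb
  Mat      red;
  PetscInt nsub = 2; // must be between 1 and the size of mat's communicator

  PetscCall(MatCreateRedundantMatrix(mat, nsub, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &red));
  // each of the nsub subcommunicators now holds a complete copy of mat
  PetscCall(MatDestroy(&red));
.ve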
10443: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10444: @*/
10445: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10446: {
10447: MPI_Comm comm;
10448: PetscMPIInt size;
10449: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10450: Mat_Redundant *redund = NULL;
10451: PetscSubcomm psubcomm = NULL;
10452: MPI_Comm subcomm_in = subcomm;
10453: Mat *matseq;
10454: IS isrow, iscol;
10455: PetscBool newsubcomm = PETSC_FALSE;
10457: PetscFunctionBegin;
10459: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10460: PetscAssertPointer(*matredundant, 5);
10462: }
10464: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10465: if (size == 1 || nsubcomm == 1) {
10466: if (reuse == MAT_INITIAL_MATRIX) {
10467: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10468: } else {
10469: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10470: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10471: }
10472: PetscFunctionReturn(PETSC_SUCCESS);
10473: }
10475: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10476: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10477: MatCheckPreallocated(mat, 1);
10479: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10480: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10481: /* create psubcomm, then get subcomm */
10482: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10483: PetscCallMPI(MPI_Comm_size(comm, &size));
10484: PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);
10486: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10487: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10488: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10489: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10490: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10491: newsubcomm = PETSC_TRUE;
10492: PetscCall(PetscSubcommDestroy(&psubcomm));
10493: }
10495: /* get isrow, iscol and a local sequential matrix matseq[0] */
10496: if (reuse == MAT_INITIAL_MATRIX) {
10497: mloc_sub = PETSC_DECIDE;
10498: nloc_sub = PETSC_DECIDE;
10499: if (bs < 1) {
10500: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10501: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10502: } else {
10503: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10504: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10505: }
10506: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10507: rstart = rend - mloc_sub;
10508: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10509: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10510: PetscCall(ISSetIdentity(iscol));
10511: } else { /* reuse == MAT_REUSE_MATRIX */
10512: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10513: /* retrieve subcomm */
10514: PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10515: redund = (*matredundant)->redundant;
10516: isrow = redund->isrow;
10517: iscol = redund->iscol;
10518: matseq = redund->matseq;
10519: }
10520: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10522: /* get matredundant over subcomm */
10523: if (reuse == MAT_INITIAL_MATRIX) {
10524: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10526: /* create a supporting struct and attach it to C for reuse */
10527: PetscCall(PetscNew(&redund));
10528: (*matredundant)->redundant = redund;
10529: redund->isrow = isrow;
10530: redund->iscol = iscol;
10531: redund->matseq = matseq;
10532: if (newsubcomm) {
10533: redund->subcomm = subcomm;
10534: } else {
10535: redund->subcomm = MPI_COMM_NULL;
10536: }
10537: } else {
10538: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10539: }
10540: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10541: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10542: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10543: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10544: }
10545: #endif
10546: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10547: PetscFunctionReturn(PETSC_SUCCESS);
10548: }
10550: /*@C
10551: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10552: a given `Mat`. Each submatrix can span multiple procs.
10554: Collective
10556: Input Parameters:
10557: + mat - the matrix
10558: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10559: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10561: Output Parameter:
10562: . subMat - parallel sub-matrices each spanning a given `subcomm`
10564: Level: advanced
10566: Notes:
10567: The submatrix partition across processes is dictated by `subComm`, a
10568: communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10569: is not restricted to be grouped with consecutive original MPI processes.
10571: Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10572: maps directly to the layout of the original matrix (with respect to the local
10573: row and column partitioning). So the original 'DiagonalMat' naturally maps
10574: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10575: the `subMat`. However, the offDiagMat loses some columns, and these are
10576: reconstructed with `MatSetValues()`.
10578: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10580: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10581: @*/
10582: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10583: {
10584: PetscMPIInt commsize, subCommSize;
10586: PetscFunctionBegin;
10587: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10588: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10589: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize);
10591: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10592: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10593: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10594: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10595: PetscFunctionReturn(PETSC_SUCCESS);
10596: }
10598: /*@
10599: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10601: Not Collective
10603: Input Parameters:
10604: + mat - matrix to extract local submatrix from
10605: . isrow - local row indices for submatrix
10606: - iscol - local column indices for submatrix
10608: Output Parameter:
10609: . submat - the submatrix
10611: Level: intermediate
10613: Notes:
10614: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10616: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10617: the same as that of `mat`, it may be `PETSC_COMM_SELF`, or it may be some other subcommunicator of `mat`'s.
10619: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10620: `MatSetValuesBlockedLocal()` will also be implemented.
10622: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10623: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
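  Example Usage:
  A minimal sketch, assuming `mat` has a local-to-global mapping (for example a matrix obtained with `DMCreateMatrix()`) and `isrow`/`iscol` are
  local index sets; the entry being inserted is illustrative only:
.vb
  Mat         sub;
  PetscInt    row = 0, col = 0; // local indices relative to isrow/iscol
  PetscScalar val = 1.0;

  PetscCall(MatGetLocalSubMatrix(mat, isrow, iscol, &sub));
  PetscCall(MatSetValuesLocal(sub, 1, &row, 1, &col, &val, ADD_VALUES));
  PetscCall(MatRestoreLocalSubMatrix(mat, isrow, iscol, &sub));
  // assemble mat as usual once all contributions have been set
.ve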
10625: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10626: @*/
10627: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10628: {
10629: PetscFunctionBegin;
10633: PetscCheckSameComm(isrow, 2, iscol, 3);
10634: PetscAssertPointer(submat, 4);
10635: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10637: if (mat->ops->getlocalsubmatrix) {
10638: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10639: } else {
10640: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10641: }
10642: (*submat)->assembled = mat->assembled;
10643: PetscFunctionReturn(PETSC_SUCCESS);
10644: }
10646: /*@
10647: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10649: Not Collective
10651: Input Parameters:
10652: + mat - matrix to extract local submatrix from
10653: . isrow - local row indices for submatrix
10654: . iscol - local column indices for submatrix
10655: - submat - the submatrix
10657: Level: intermediate
10659: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10660: @*/
10661: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10662: {
10663: PetscFunctionBegin;
10667: PetscCheckSameComm(isrow, 2, iscol, 3);
10668: PetscAssertPointer(submat, 4);
10671: if (mat->ops->restorelocalsubmatrix) {
10672: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10673: } else {
10674: PetscCall(MatDestroy(submat));
10675: }
10676: *submat = NULL;
10677: PetscFunctionReturn(PETSC_SUCCESS);
10678: }
10680: /*@
10681: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10683: Collective
10685: Input Parameter:
10686: . mat - the matrix
10688: Output Parameter:
10689: . is - if any rows have zero or missing diagonal entries, this contains the list of them
10691: Level: developer
10693: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10694: @*/
10695: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10696: {
10697: PetscFunctionBegin;
10700: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10701: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10703: if (!mat->ops->findzerodiagonals) {
10704: Vec diag;
10705: const PetscScalar *a;
10706: PetscInt *rows;
10707: PetscInt rStart, rEnd, r, nrow = 0;
10709: PetscCall(MatCreateVecs(mat, &diag, NULL));
10710: PetscCall(MatGetDiagonal(mat, diag));
10711: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10712: PetscCall(VecGetArrayRead(diag, &a));
10713: for (r = 0; r < rEnd - rStart; ++r)
10714: if (a[r] == 0.0) ++nrow;
10715: PetscCall(PetscMalloc1(nrow, &rows));
10716: nrow = 0;
10717: for (r = 0; r < rEnd - rStart; ++r)
10718: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10719: PetscCall(VecRestoreArrayRead(diag, &a));
10720: PetscCall(VecDestroy(&diag));
10721: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10722: } else {
10723: PetscUseTypeMethod(mat, findzerodiagonals, is);
10724: }
10725: PetscFunctionReturn(PETSC_SUCCESS);
10726: }
10728: /*@
10729: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10731: Collective
10733: Input Parameter:
10734: . mat - the matrix
10736: Output Parameter:
10737: . is - contains the list of rows with off block diagonal entries
10739: Level: developer
10741: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10742: @*/
10743: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10744: {
10745: PetscFunctionBegin;
10748: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10749: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10751: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10752: PetscFunctionReturn(PETSC_SUCCESS);
10753: }
10755: /*@C
10756: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10758: Collective; No Fortran Support
10760: Input Parameter:
10761: . mat - the matrix
10763: Output Parameter:
10764: . values - the block inverses in column major order (FORTRAN-like)
10766: Level: advanced
10768: Notes:
10769: The size of the blocks is determined by the block size of the matrix.
10771: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10773: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
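  Example Usage:
  A minimal sketch of reading the returned block inverses, assuming `mat` is an assembled matrix with a fixed block size:
.vb
  const PetscScalar *vals;
  PetscInt           bs, m;

  PetscCall(MatInvertBlockDiagonal(mat, &vals));
  PetscCall(MatGetBlockSize(mat, &bs));
  PetscCall(MatGetLocalSize(mat, &m, NULL));
  // vals holds m/bs local blocks of size bs x bs, each in column-major order;
  // the inverse of local block i starts at vals + i * bs * bs; do not free vals
.ve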
10775: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10776: @*/
10777: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
10778: {
10779: PetscFunctionBegin;
10781: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10782: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10783: PetscUseTypeMethod(mat, invertblockdiagonal, values);
10784: PetscFunctionReturn(PETSC_SUCCESS);
10785: }
10787: /*@
10788: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10790: Collective; No Fortran Support
10792: Input Parameters:
10793: + mat - the matrix
10794: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10795: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10797: Output Parameter:
10798: . values - the block inverses in column major order (FORTRAN-like)
10800: Level: advanced
10802: Notes:
10803: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10805: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10807: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10808: @*/
10809: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
10810: {
10811: PetscFunctionBegin;
10813: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10814: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10815: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10816: PetscFunctionReturn(PETSC_SUCCESS);
10817: }
10819: /*@
10820: MatInvertBlockDiagonalMat - set the values of matrix C to be the inverted block diagonal of matrix A
10822: Collective
10824: Input Parameters:
10825: + A - the matrix
10826: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10828: Level: advanced
10830: Note:
10831: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10833: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10834: @*/
10835: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10836: {
10837: const PetscScalar *vals;
10838: PetscInt *dnnz;
10839: PetscInt m, rstart, rend, bs, i, j;
10841: PetscFunctionBegin;
10842: PetscCall(MatInvertBlockDiagonal(A, &vals));
10843: PetscCall(MatGetBlockSize(A, &bs));
10844: PetscCall(MatGetLocalSize(A, &m, NULL));
10845: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10846: PetscCall(MatSetBlockSizes(C, A->rmap->bs, A->cmap->bs));
10847: PetscCall(PetscMalloc1(m / bs, &dnnz));
10848: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10849: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10850: PetscCall(PetscFree(dnnz));
10851: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10852: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10853: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10854: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10855: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10856: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10857: PetscFunctionReturn(PETSC_SUCCESS);
10858: }
10860: /*@
10861: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10862: via `MatTransposeColoringCreate()`.
10864: Collective
10866: Input Parameter:
10867: . c - coloring context
10869: Level: intermediate
10871: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10872: @*/
10873: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10874: {
10875: MatTransposeColoring matcolor = *c;
10877: PetscFunctionBegin;
10878: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10879: if (--((PetscObject)matcolor)->refct > 0) {
10880: matcolor = NULL;
10881: PetscFunctionReturn(PETSC_SUCCESS);
10882: }
10884: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10885: PetscCall(PetscFree(matcolor->rows));
10886: PetscCall(PetscFree(matcolor->den2sp));
10887: PetscCall(PetscFree(matcolor->colorforcol));
10888: PetscCall(PetscFree(matcolor->columns));
10889: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10890: PetscCall(PetscHeaderDestroy(c));
10891: PetscFunctionReturn(PETSC_SUCCESS);
10892: }
10894: /*@
10895: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
10896: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
10897: `MatTransposeColoring` to sparse `B`.
10899: Collective
10901: Input Parameters:
10902: + coloring - coloring context created with `MatTransposeColoringCreate()`
10903: - B - sparse matrix
10905: Output Parameter:
10906: . Btdense - dense matrix $B^T$
10908: Level: developer
10910: Note:
10911: These are used internally for some implementations of `MatRARt()`
10913: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10914: @*/
10915: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
10916: {
10917: PetscFunctionBegin;
10922: PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
10923: PetscFunctionReturn(PETSC_SUCCESS);
10924: }
10926: /*@
10927: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
10928: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
10929: in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recover the sparse matrix
10930: $C_{sp}$ from $C_{den}$.
10932: Collective
10934: Input Parameters:
10935: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10936: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
10938: Output Parameter:
10939: . Csp - sparse matrix
10941: Level: developer
10943: Note:
10944: These are used internally for some implementations of `MatRARt()`
10946: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
10947: @*/
10948: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
10949: {
10950: PetscFunctionBegin;
10955: PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
10956: PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
10957: PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
10958: PetscFunctionReturn(PETSC_SUCCESS);
10959: }
10961: /*@
10962: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
10964: Collective
10966: Input Parameters:
10967: + mat - the matrix product C
10968: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
10970: Output Parameter:
10971: . color - the new coloring context
10973: Level: intermediate
10975: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
10976: `MatTransColoringApplyDenToSp()`
10977: @*/
10978: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
10979: {
10980: MatTransposeColoring c;
10981: MPI_Comm comm;
10983: PetscFunctionBegin;
10984: PetscAssertPointer(color, 3);
10986: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10987: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10988: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
10989: c->ctype = iscoloring->ctype;
10990: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
10991: *color = c;
10992: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10993: PetscFunctionReturn(PETSC_SUCCESS);
10994: }
10996: /*@
10997: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
10998: matrix has had new nonzero locations added to (or removed from) it since the previous call, the value will be larger.
11000: Not Collective
11002: Input Parameter:
11003: . mat - the matrix
11005: Output Parameter:
11006: . state - the current state
11008: Level: intermediate
11010: Notes:
11011: You can only compare states from two different calls to the SAME matrix; you cannot compare calls between
11012: different matrices
11014: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
11016: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
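  Example Usage:
  A minimal sketch of detecting a change in the nonzero structure, assuming `mat` is a matrix that may be modified between the two calls:
.vb
  PetscObjectState state, newstate;

  PetscCall(MatGetNonzeroState(mat, &state));
  // ... assemble or modify mat ...
  PetscCall(MatGetNonzeroState(mat, &newstate));
  if (newstate > state) {
    // nonzero locations were added to or removed from mat
  }
.ve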
11018: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
11019: @*/
11020: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
11021: {
11022: PetscFunctionBegin;
11024: *state = mat->nonzerostate;
11025: PetscFunctionReturn(PETSC_SUCCESS);
11026: }
11028: /*@
11029: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
11030: matrices from each processor
11032: Collective
11034: Input Parameters:
11035: + comm - the communicator the parallel matrix will live on
11036: . seqmat - the input sequential matrix (one per MPI process)
11037: . n - number of local columns (or `PETSC_DECIDE`)
11038: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11040: Output Parameter:
11041: . mpimat - the parallel matrix generated
11043: Level: developer
11045: Note:
11046: The number of columns of the matrix in EACH processor MUST be the same.
11048: .seealso: [](ch_matrices), `Mat`
11049: @*/
11050: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11051: {
11052: PetscMPIInt size;
11054: PetscFunctionBegin;
11055: PetscCallMPI(MPI_Comm_size(comm, &size));
11056: if (size == 1) {
11057: if (reuse == MAT_INITIAL_MATRIX) {
11058: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11059: } else {
11060: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11061: }
11062: PetscFunctionReturn(PETSC_SUCCESS);
11063: }
11065: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
11067: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11068: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11069: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11070: PetscFunctionReturn(PETSC_SUCCESS);
11071: }
11073: /*@
11074: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
11076: Collective
11078: Input Parameters:
11079: + A - the matrix to create subdomains from
11080: - N - requested number of subdomains
11082: Output Parameters:
11083: + n - number of subdomains resulting on this MPI process
11084: - iss - `IS` list with indices of subdomains on this MPI process
11086: Level: advanced
11088: Note:
11089: The number of subdomains must be smaller than the communicator size
11091: .seealso: [](ch_matrices), `Mat`, `IS`
11092: @*/
11093: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11094: {
11095: MPI_Comm comm, subcomm;
11096: PetscMPIInt size, rank, color;
11097: PetscInt rstart, rend, k;
11099: PetscFunctionBegin;
11100: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11101: PetscCallMPI(MPI_Comm_size(comm, &size));
11102: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11103: PetscCheck(N >= 1 && N < size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11104: *n = 1;
11105: k = size / N + (size % N > 0); /* There are up to k ranks to a color */
11106: color = rank / k;
11107: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11108: PetscCall(PetscMalloc1(1, iss));
11109: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11110: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11111: PetscCallMPI(MPI_Comm_free(&subcomm));
11112: PetscFunctionReturn(PETSC_SUCCESS);
11113: }
11115: /*@
11116: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11118: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11119: If they are not the same, uses `MatMatMatMult()`.
11121: Once the coarse grid problem is constructed, it corrects for interpolation operators
11122: that are not of full rank, which can legitimately happen in the case of non-nested
11123: geometric multigrid.
11125: Input Parameters:
11126: + restrct - restriction operator
11127: . dA - fine grid matrix
11128: . interpolate - interpolation operator
11129: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11130: - fill - expected fill, use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
11132: Output Parameter:
11133: . A - the Galerkin coarse matrix
11135: Options Database Key:
11136: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
11138: Level: developer
11140: Note:
11141: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
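  Example Usage:
  A minimal sketch, assuming `R`, `Afine`, and `P` are assembled restriction, fine-grid, and interpolation matrices (the names are illustrative only):
.vb
  Mat Acoarse;

  PetscCall(MatGalerkin(R, Afine, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Acoarse));
  PetscCall(MatDestroy(&Acoarse));
.ve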
11143: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11144: @*/
11145: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11146: {
11147: IS zerorows;
11148: Vec diag;
11150: PetscFunctionBegin;
11151: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
11152: /* Construct the coarse grid matrix */
11153: if (interpolate == restrct) {
11154: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11155: } else {
11156: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11157: }
11159: /* If the interpolation matrix is not of full rank, A will have zero rows.
11160: This can legitimately happen in the case of non-nested geometric multigrid.
11161: In that event, we set the rows of the matrix to the rows of the identity,
11162: ignoring the equations (as the RHS will also be zero). */
11164: PetscCall(MatFindZeroRows(*A, &zerorows));
11166: if (zerorows != NULL) { /* if there are any zero rows */
11167: PetscCall(MatCreateVecs(*A, &diag, NULL));
11168: PetscCall(MatGetDiagonal(*A, diag));
11169: PetscCall(VecISSet(diag, zerorows, 1.0));
11170: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11171: PetscCall(VecDestroy(&diag));
11172: PetscCall(ISDestroy(&zerorows));
11173: }
11174: PetscFunctionReturn(PETSC_SUCCESS);
11175: }
11177: /*@C
11178: MatSetOperation - Allows user to set a matrix operation for any matrix type
11180: Logically Collective
11182: Input Parameters:
11183: + mat - the matrix
11184: . op - the name of the operation
11185: - f - the function that provides the operation
11187: Level: developer
11189: Example Usage:
11190: .vb
11191: extern PetscErrorCode usermult(Mat, Vec, Vec);
11193: PetscCall(MatCreateXXX(comm, ..., &A));
11194: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscErrorCodeFn *)usermult));
11195: .ve
11197: Notes:
11198: See the file `include/petscmat.h` for a complete list of matrix
11199: operations, which all have the form MATOP_<OPERATION>, where
11200: <OPERATION> is the name (in all capital letters) of the
11201: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11203: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11204: sequence as the usual matrix interface routines, since they
11205: are intended to be accessed via the usual matrix interface
11206: routines, e.g.,
11207: .vb
11208: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11209: .ve
11211: In particular each function MUST return `PETSC_SUCCESS` on success and
11212: nonzero on failure.
11214: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11216: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11217: @*/
11218: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, PetscErrorCodeFn *f)
11219: {
11220: PetscFunctionBegin;
11222: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (PetscErrorCodeFn *)mat->ops->view) mat->ops->viewnative = mat->ops->view;
11223: (((PetscErrorCodeFn **)mat->ops)[op]) = f;
11224: PetscFunctionReturn(PETSC_SUCCESS);
11225: }
11227: /*@C
11228: MatGetOperation - Gets a matrix operation for any matrix type.
11230: Not Collective
11232: Input Parameters:
11233: + mat - the matrix
11234: - op - the name of the operation
11236: Output Parameter:
11237: . f - the function that provides the operation
11239: Level: developer
11241: Example Usage:
11242: .vb
11243: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11245: MatGetOperation(A, MATOP_MULT, (PetscErrorCodeFn **)&usermult);
11246: .ve
11248: Notes:
11249: See the file `include/petscmat.h` for a complete list of matrix
11250: operations, which all have the form MATOP_<OPERATION>, where
11251: <OPERATION> is the name (in all capital letters) of the
11252: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11254: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11256: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11257: @*/
11258: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, PetscErrorCodeFn **f)
11259: {
11260: PetscFunctionBegin;
11262: *f = (((PetscErrorCodeFn **)mat->ops)[op]);
11263: PetscFunctionReturn(PETSC_SUCCESS);
11264: }
11266: /*@
11267: MatHasOperation - Determines whether the given matrix supports the particular operation.
11269: Not Collective
11271: Input Parameters:
11272: + mat - the matrix
11273: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11275: Output Parameter:
11276: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11278: Level: advanced
11280: Note:
11281: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
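  Example Usage:
  A minimal sketch, assuming `mat` is any matrix:
.vb
  PetscBool has;

  PetscCall(MatHasOperation(mat, MATOP_GET_DIAGONAL, &has));
  if (has) {
    // it is safe to call MatGetDiagonal() on mat
  }
.ve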
11283: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11284: @*/
11285: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11286: {
11287: PetscFunctionBegin;
11289: PetscAssertPointer(has, 3);
11290: if (mat->ops->hasoperation) {
11291: PetscUseTypeMethod(mat, hasoperation, op, has);
11292: } else {
11293: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11294: else {
11295: *has = PETSC_FALSE;
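/* special case: on a single MPI process MatCreateSubMatrix() can be provided through the MATOP_CREATE_SUBMATRICES implementation */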
11296: if (op == MATOP_CREATE_SUBMATRIX) {
11297: PetscMPIInt size;
11299: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11300: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11301: }
11302: }
11303: }
11304: PetscFunctionReturn(PETSC_SUCCESS);
11305: }
11307: /*@
11308: MatHasCongruentLayouts - Determines whether the row and column layouts of the matrix are congruent
11310: Collective
11312: Input Parameter:
11313: . mat - the matrix
11315: Output Parameter:
11316: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11318: Level: beginner
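Example Usage:
A small sketch, assuming `mat` has already been created and its sizes set:
.vb
PetscBool cong;

PetscCall(MatHasCongruentLayouts(mat, &cong));
if (cong) {
  // the row and column layouts are distributed identically across the MPI processes
}
.ve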
11320: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11321: @*/
11322: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11323: {
11324: PetscFunctionBegin;
11327: PetscAssertPointer(cong, 2);
11328: if (!mat->rmap || !mat->cmap) {
11329: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11330: PetscFunctionReturn(PETSC_SUCCESS);
11331: }
11332: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11333: PetscCall(PetscLayoutSetUp(mat->rmap));
11334: PetscCall(PetscLayoutSetUp(mat->cmap));
11335: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11336: if (*cong) mat->congruentlayouts = 1;
11337: else mat->congruentlayouts = 0;
11338: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11339: PetscFunctionReturn(PETSC_SUCCESS);
11340: }
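/* Developer-level helper with no manual page: simply dispatches to the type-specific setinf implementation of A */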
11342: PetscErrorCode MatSetInf(Mat A)
11343: {
11344: PetscFunctionBegin;
11345: PetscUseTypeMethod(A, setinf);
11346: PetscFunctionReturn(PETSC_SUCCESS);
11347: }
11349: /*@
11350: MatCreateGraph - create a scalar matrix (that is, a matrix with one vertex for each block vertex of the original matrix) for use in graph algorithms,
11351: possibly removing small values from the graph structure.
11353: Collective
11355: Input Parameters:
11356: + A - the matrix
11357: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11358: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11359: . filter - filter value: negative means no filtering; 0.0 removes only exactly zero entries; a positive value removes entries with absolute value <= filter
11360: . num_idx - size of the `index` array
11361: - index - array of block indices to use for the graph strength-of-connection weights
11363: Output Parameter:
11364: . graph - the resulting graph
11366: Level: advanced
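Example Usage:
A sketch that builds a symmetrized, diagonally scaled graph and drops only exact zeros; the block index array
used here is purely illustrative:
.vb
Mat      graph;
PetscInt idx[] = {0};

PetscCall(MatCreateGraph(A, PETSC_TRUE, PETSC_TRUE, 0.0, 1, idx, &graph));
// ... use graph, for example inside a coarsening or partitioning algorithm ...
PetscCall(MatDestroy(&graph));
.ve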
11368: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11369: @*/
11370: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
11371: {
11372: PetscFunctionBegin;
11376: PetscAssertPointer(graph, 7);
11377: PetscCall(PetscLogEventBegin(MAT_CreateGraph, A, 0, 0, 0));
11378: PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
11379: PetscCall(PetscLogEventEnd(MAT_CreateGraph, A, 0, 0, 0));
11380: PetscFunctionReturn(PETSC_SUCCESS);
11381: }
11383: /*@
11384: MatEliminateZeros - eliminate the nondiagonal zero entries from the nonzero structure of a sparse `Mat` in place,
11385: meaning the same memory is used for the matrix, and no new memory is allocated.
11387: Collective
11389: Input Parameters:
11390: + A - the matrix
11391: - keep - if the diagonal coefficient of a given row of `A` is zero, indicates whether it should be left in the structure or eliminated as well
11393: Level: intermediate
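Example Usage:
A minimal sketch that drops explicitly stored off-diagonal zeros from an assembled matrix `A` while keeping
zero diagonal entries in the structure:
.vb
PetscCall(MatEliminateZeros(A, PETSC_TRUE));
.ve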
11395: Developer Note:
11396: The entries in the sparse matrix data structure are shifted to fill in the vacated locations in the data, so the
11397: tails of the arrays in the data structure are left unused.
11399: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11400: @*/
11401: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11402: {
11403: PetscFunctionBegin;
11405: PetscUseTypeMethod(A, eliminatezeros, keep);
11406: PetscFunctionReturn(PETSC_SUCCESS);
11407: }
11409: /*@C
11410: MatGetCurrentMemType - Get the memory type where the matrix data currently resides
11412: Not Collective, but the result will be the same on all MPI processes
11414: Input Parameter:
11415: . A - the matrix whose memory type we are checking
11417: Output Parameter:
11418: . m - the memory type
11420: Level: intermediate
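Example Usage:
A sketch that branches on where the matrix data currently resides; it assumes the `PetscMemTypeDevice()`
convenience macro from the PETSc device types header:
.vb
PetscMemType mtype;

PetscCall(MatGetCurrentMemType(A, &mtype));
if (PetscMemTypeDevice(mtype)) {
  // the matrix data currently lives in device (GPU) memory
}
.ve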
11422: .seealso: [](ch_matrices), `Mat`, `MatBoundToCPU()`, `PetscMemType`
11423: @*/
11424: PetscErrorCode MatGetCurrentMemType(Mat A, PetscMemType *m)
11425: {
11426: PetscFunctionBegin;
11428: PetscAssertPointer(m, 2);
11429: if (A->ops->getcurrentmemtype) PetscUseTypeMethod(A, getcurrentmemtype, m);
11430: else *m = PETSC_MEMTYPE_HOST;
11431: PetscFunctionReturn(PETSC_SUCCESS);
11432: }