Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_CreateGraph;
40: PetscLogEvent MAT_SetValuesBatch;
41: PetscLogEvent MAT_ViennaCLCopyToGPU;
42: PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
43: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
44: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
45: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
46: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
47: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
49: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
51: /*@
52: MatSetRandom - Sets all components of a matrix to random numbers.
54: Logically Collective
56: Input Parameters:
57: + x - the matrix
58: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL`,
59:          in which case one is created internally.
61: Example:
62: .vb
63: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
64: MatSetRandom(x,rctx);
65: PetscRandomDestroy(&rctx);
66: .ve
68: Level: intermediate
70: Notes:
71: For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations;
73: for sparse matrices that already have nonzero locations, it fills those locations with random numbers.
75: It generates an error if used on unassembled sparse matrices that have not been preallocated.
77: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
78: @*/
79: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
80: {
81: PetscRandom randObj = NULL;
83: PetscFunctionBegin;
87: MatCheckPreallocated(x, 1);
89: if (!rctx) {
90: MPI_Comm comm;
91: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
92: PetscCall(PetscRandomCreate(comm, &randObj));
93: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
94: PetscCall(PetscRandomSetFromOptions(randObj));
95: rctx = randObj;
96: }
97: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
98: PetscUseTypeMethod(x, setrandom, rctx);
99: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
101: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
103: PetscCall(PetscRandomDestroy(&randObj));
104: PetscFunctionReturn(PETSC_SUCCESS);
105: }
107: /*@
108: MatCopyHashToXAIJ - copy hash table entries into an XAIJ matrix type
110: Logically Collective
112: Input Parameter:
113: . A - A matrix in unassembled, hash table form
115: Output Parameter:
116: . B - The XAIJ matrix. This can either be `A` or some matrix of equivalent size, e.g. obtained from `A` via `MatDuplicate()`
118: Example:
119: .vb
120: PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
121: PetscCall(MatCopyHashToXAIJ(A, B));
122: .ve
124: Level: advanced
126: Notes:
127: If `B` is `A`, then the hash table data structure will be destroyed. On return, `B` is assembled
129: .seealso: [](ch_matrices), `Mat`, `MAT_USE_HASH_TABLE`
130: @*/
131: PetscErrorCode MatCopyHashToXAIJ(Mat A, Mat B)
132: {
133: PetscFunctionBegin;
135: PetscUseTypeMethod(A, copyhashtoxaij, B);
136: PetscFunctionReturn(PETSC_SUCCESS);
137: }
139: /*@
140: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
142: Logically Collective
144: Input Parameter:
145: . mat - the factored matrix
147: Output Parameters:
148: + pivot - the pivot value computed
149: - row - the row in which the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
150:         share the matrix
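   Example:
   A minimal sketch of querying the failed pivot; `F` is assumed to be a factored matrix obtained with `MatGetFactor()`.
.vb
  MatFactorError err;
  PetscCall(MatGetFactorError(F, &err));
  if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
    PetscReal pivot;
    PetscInt  row;
    PetscCall(MatFactorGetErrorZeroPivot(F, &pivot, &row));
  }
.ve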
152: Level: advanced
154: Notes:
155: This routine does not work for factorizations done with external packages.
157: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
159: This can also be called on non-factored matrices, for example, matrices used in SOR.
161: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
162: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
163: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
164: @*/
165: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
166: {
167: PetscFunctionBegin;
169: PetscAssertPointer(pivot, 2);
170: PetscAssertPointer(row, 3);
171: *pivot = mat->factorerror_zeropivot_value;
172: *row = mat->factorerror_zeropivot_row;
173: PetscFunctionReturn(PETSC_SUCCESS);
174: }
176: /*@
177: MatFactorGetError - gets the error code from a factorization
179: Logically Collective
181: Input Parameter:
182: . mat - the factored matrix
184: Output Parameter:
185: . err - the error code
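   Example:
   A minimal sketch; `F` is assumed to be the factored matrix, for example obtained with `PCFactorGetMatrix()`.
.vb
  MatFactorError err;
  PetscCall(MatFactorGetError(F, &err));
  if (err != MAT_FACTOR_NOERROR) PetscCall(MatFactorClearError(F));
.ve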
187: Level: advanced
189: Note:
190: This can also be called on non-factored matrices, for example, matrices used in SOR.
192: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
193: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
194: @*/
195: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
196: {
197: PetscFunctionBegin;
199: PetscAssertPointer(err, 2);
200: *err = mat->factorerrortype;
201: PetscFunctionReturn(PETSC_SUCCESS);
202: }
204: /*@
205: MatFactorClearError - clears the error code in a factorization
207: Logically Collective
209: Input Parameter:
210: . mat - the factored matrix
212: Level: developer
214: Note:
215: This can also be called on non-factored matrices, for example, matrices used in SOR.
217: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
218: `MatGetErrorCode()`, `MatFactorError`
219: @*/
220: PetscErrorCode MatFactorClearError(Mat mat)
221: {
222: PetscFunctionBegin;
224: mat->factorerrortype = MAT_FACTOR_NOERROR;
225: mat->factorerror_zeropivot_value = 0.0;
226: mat->factorerror_zeropivot_row = 0;
227: PetscFunctionReturn(PETSC_SUCCESS);
228: }
230: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
231: {
232: Vec r, l;
233: const PetscScalar *al;
234: PetscInt i, nz, gnz, N, n, st;
236: PetscFunctionBegin;
237: PetscCall(MatCreateVecs(mat, &r, &l));
238: if (!cols) { /* nonzero rows */
239: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
240: PetscCall(MatGetSize(mat, &N, NULL));
241: PetscCall(MatGetLocalSize(mat, &n, NULL));
242: PetscCall(VecSet(l, 0.0));
243: PetscCall(VecSetRandom(r, NULL));
244: PetscCall(MatMult(mat, r, l));
245: PetscCall(VecGetArrayRead(l, &al));
246: } else { /* nonzero columns */
247: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
248: PetscCall(MatGetSize(mat, NULL, &N));
249: PetscCall(MatGetLocalSize(mat, NULL, &n));
250: PetscCall(VecSet(r, 0.0));
251: PetscCall(VecSetRandom(l, NULL));
252: PetscCall(MatMultTranspose(mat, l, r));
253: PetscCall(VecGetArrayRead(r, &al));
254: }
255: if (tol <= 0.0) {
256: for (i = 0, nz = 0; i < n; i++)
257: if (al[i] != 0.0) nz++;
258: } else {
259: for (i = 0, nz = 0; i < n; i++)
260: if (PetscAbsScalar(al[i]) > tol) nz++;
261: }
262: PetscCallMPI(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
263: if (gnz != N) {
264: PetscInt *nzr;
265: PetscCall(PetscMalloc1(nz, &nzr));
266: if (nz) {
267: if (tol < 0) {
268: for (i = 0, nz = 0; i < n; i++)
269: if (al[i] != 0.0) nzr[nz++] = i + st;
270: } else {
271: for (i = 0, nz = 0; i < n; i++)
272: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
273: }
274: }
275: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
276: } else *nonzero = NULL;
277: if (!cols) { /* nonzero rows */
278: PetscCall(VecRestoreArrayRead(l, &al));
279: } else {
280: PetscCall(VecRestoreArrayRead(r, &al));
281: }
282: PetscCall(VecDestroy(&l));
283: PetscCall(VecDestroy(&r));
284: PetscFunctionReturn(PETSC_SUCCESS);
285: }
287: /*@
288: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
290: Input Parameter:
291: . mat - the matrix
293: Output Parameter:
294: . keptrows - the rows that are not completely zero
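   Example:
   An illustrative sketch; `A` is assumed to be an assembled matrix.
.vb
  IS keptrows;
  PetscCall(MatFindNonzeroRows(A, &keptrows));
  if (keptrows) { /* NULL means every row has a nonzero */
    PetscCall(ISView(keptrows, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(ISDestroy(&keptrows));
  }
.ve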
296: Level: intermediate
298: Note:
299: `keptrows` is set to `NULL` if all rows are nonzero.
301: Developer Note:
302: If `keptrows` is not `NULL`, it must be sorted.
304: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
305: @*/
306: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
307: {
308: PetscFunctionBegin;
311: PetscAssertPointer(keptrows, 2);
312: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
313: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
314: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
315: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
316: if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
317: PetscFunctionReturn(PETSC_SUCCESS);
318: }
320: /*@
321: MatFindZeroRows - Locate all rows that are completely zero in the matrix
323: Input Parameter:
324: . mat - the matrix
326: Output Parameter:
327: . zerorows - the rows that are completely zero
329: Level: intermediate
331: Note:
332: `zerorows` is set to `NULL` if no rows are zero.
334: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
335: @*/
336: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
337: {
338: IS keptrows;
339: PetscInt m, n;
341: PetscFunctionBegin;
344: PetscAssertPointer(zerorows, 2);
345: PetscCall(MatFindNonzeroRows(mat, &keptrows));
346: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
347: In keeping with this convention, we set zerorows to NULL if there are no zero
348: rows. */
349: if (keptrows == NULL) {
350: *zerorows = NULL;
351: } else {
352: PetscCall(MatGetOwnershipRange(mat, &m, &n));
353: PetscCall(ISComplement(keptrows, m, n, zerorows));
354: PetscCall(ISDestroy(&keptrows));
355: }
356: PetscFunctionReturn(PETSC_SUCCESS);
357: }
359: /*@
360: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
362: Not Collective
364: Input Parameter:
365: . A - the matrix
367: Output Parameter:
368: . a - the diagonal part (which is a SEQUENTIAL matrix)
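   Example:
   A minimal sketch; `A` is assumed to be an assembled parallel matrix such as `MATMPIAIJ`.
.vb
  Mat Ad;
  PetscCall(MatGetDiagonalBlock(A, &Ad));
  PetscCall(MatView(Ad, PETSC_VIEWER_STDOUT_SELF));
  /* Ad is borrowed from A; do not destroy it */
.ve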
370: Level: advanced
372: Notes:
373: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
375: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
377: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
378: @*/
379: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
380: {
381: PetscFunctionBegin;
384: PetscAssertPointer(a, 2);
385: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
386: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
387: else {
388: PetscMPIInt size;
390: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
391: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
392: *a = A;
393: }
394: PetscFunctionReturn(PETSC_SUCCESS);
395: }
397: /*@
398: MatGetTrace - Gets the trace of a matrix, that is, the sum of its diagonal entries.
400: Collective
402: Input Parameter:
403: . mat - the matrix
405: Output Parameter:
406: . trace - the sum of the diagonal entries
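   Example:
   An illustrative sketch; `A` is assumed to be an assembled square matrix.
.vb
  PetscScalar trace;
  PetscCall(MatGetTrace(A, &trace));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "trace = %g\n", (double)PetscRealPart(trace)));
.ve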
408: Level: advanced
410: .seealso: [](ch_matrices), `Mat`
411: @*/
412: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
413: {
414: Vec diag;
416: PetscFunctionBegin;
418: PetscAssertPointer(trace, 2);
419: PetscCall(MatCreateVecs(mat, &diag, NULL));
420: PetscCall(MatGetDiagonal(mat, diag));
421: PetscCall(VecSum(diag, trace));
422: PetscCall(VecDestroy(&diag));
423: PetscFunctionReturn(PETSC_SUCCESS);
424: }
426: /*@
427: MatRealPart - Zeros out the imaginary part of the matrix
429: Logically Collective
431: Input Parameter:
432: . mat - the matrix
434: Level: advanced
436: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
437: @*/
438: PetscErrorCode MatRealPart(Mat mat)
439: {
440: PetscFunctionBegin;
443: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
444: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
445: MatCheckPreallocated(mat, 1);
446: PetscUseTypeMethod(mat, realpart);
447: PetscFunctionReturn(PETSC_SUCCESS);
448: }
450: /*@C
451: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
453: Collective
455: Input Parameter:
456: . mat - the matrix
458: Output Parameters:
459: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
460: - ghosts - the global indices of the ghost points
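   Example:
   A sketch of pairing the ghost information with `VecCreateGhost()`; `A` is assumed to be a `MATMPIAIJ` matrix.
.vb
  PetscInt        nghosts, n;
  const PetscInt *ghosts;
  Vec             v;
  PetscCall(MatGetGhosts(A, &nghosts, &ghosts));
  PetscCall(MatGetLocalSize(A, NULL, &n)); /* local number of columns */
  PetscCall(VecCreateGhost(PetscObjectComm((PetscObject)A), n, PETSC_DECIDE, nghosts, ghosts, &v));
.ve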
462: Level: advanced
464: Note:
465: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
467: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
468: @*/
469: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
470: {
471: PetscFunctionBegin;
474: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
475: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
476: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
477: else {
478: if (nghosts) *nghosts = 0;
479: if (ghosts) *ghosts = NULL;
480: }
481: PetscFunctionReturn(PETSC_SUCCESS);
482: }
484: /*@
485: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
487: Logically Collective
489: Input Parameter:
490: . mat - the matrix
492: Level: advanced
494: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
495: @*/
496: PetscErrorCode MatImaginaryPart(Mat mat)
497: {
498: PetscFunctionBegin;
501: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
502: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
503: MatCheckPreallocated(mat, 1);
504: PetscUseTypeMethod(mat, imaginarypart);
505: PetscFunctionReturn(PETSC_SUCCESS);
506: }
508: /*@
509: MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure
511: Not Collective
513: Input Parameter:
514: . mat - the matrix
516: Output Parameters:
517: + missing - is any diagonal entry missing
518: - dd - the first diagonal entry that is missing on this process (optional)
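   Example:
   A minimal sketch; `A` is assumed to be an assembled `MATAIJ` matrix.
.vb
  PetscBool missing;
  PetscInt  dd;
  PetscCall(MatMissingDiagonal(A, &missing, &dd));
  if (missing) PetscCall(PetscPrintf(PETSC_COMM_SELF, "first missing diagonal on this process is in row %" PetscInt_FMT "\n", dd));
.ve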
520: Level: advanced
522: Note:
523: This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value
525: .seealso: [](ch_matrices), `Mat`
526: @*/
527: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
528: {
529: PetscFunctionBegin;
532: PetscAssertPointer(missing, 2);
533: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
534: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
535: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
536: PetscFunctionReturn(PETSC_SUCCESS);
537: }
539: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
540: /*@C
541: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
542: for each row that you get to ensure that your application does
543: not leak memory.
545: Not Collective
547: Input Parameters:
548: + mat - the matrix
549: - row - the row to get
551: Output Parameters:
552: + ncols - if not `NULL`, the number of nonzeros in `row`
553: . cols - if not `NULL`, the column numbers
554: - vals - if not `NULL`, the numerical values
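   Example:
   A sketch of looping over the locally owned rows of an assembled matrix `A` (assumed to exist).
.vb
  PetscInt rstart, rend;
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  for (PetscInt row = rstart; row < rend; row++) {
    PetscInt           ncols;
    const PetscInt    *cols;
    const PetscScalar *vals;
    PetscCall(MatGetRow(A, row, &ncols, &cols, &vals));
    /* examine cols[] and vals[] here; they are read-only */
    PetscCall(MatRestoreRow(A, row, &ncols, &cols, &vals));
  }
.ve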
556: Level: advanced
558: Notes:
559: This routine is provided for people who need to have direct access
560: to the structure of a matrix. We hope that we provide enough
561: high-level matrix routines that few users will need it.
563: `MatGetRow()` always returns 0-based column indices, regardless of
564: whether the internal representation is 0-based (default) or 1-based.
566: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
567: not wish to extract these quantities.
569: The user can only examine the values extracted with `MatGetRow()`;
570: the values CANNOT be altered. To change the matrix entries, one
571: must use `MatSetValues()`.
573: You can only have one call to `MatGetRow()` outstanding for a particular
574: matrix at a time, per processor. `MatGetRow()` can only obtain rows
575: associated with the given processor, it cannot get rows from the
576: other processors; for that we suggest using `MatCreateSubMatrices()`, then
577: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
578: is in the global number of rows.
580: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
582: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
584: Fortran Note:
585: .vb
586: PetscInt, pointer :: cols(:)
587: PetscScalar, pointer :: vals(:)
588: .ve
590: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
591: @*/
592: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
593: {
594: PetscInt incols;
596: PetscFunctionBegin;
599: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
600: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
601: MatCheckPreallocated(mat, 1);
602: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
603: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
604: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
605: if (ncols) *ncols = incols;
606: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
607: PetscFunctionReturn(PETSC_SUCCESS);
608: }
610: /*@
611: MatConjugate - replaces the matrix values with their complex conjugates
613: Logically Collective
615: Input Parameter:
616: . mat - the matrix
618: Level: advanced
620: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
621: @*/
622: PetscErrorCode MatConjugate(Mat mat)
623: {
624: PetscFunctionBegin;
626: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
627: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
628: PetscUseTypeMethod(mat, conjugate);
629: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
630: }
631: PetscFunctionReturn(PETSC_SUCCESS);
632: }
634: /*@C
635: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
637: Not Collective
639: Input Parameters:
640: + mat - the matrix
641: . row - the row to get
642: . ncols - the number of nonzeros
643: . cols - the columns of the nonzeros
644: - vals - if not `NULL`, the values of the nonzeros
646: Level: advanced
648: Notes:
649: This routine should be called after you have finished examining the entries.
651: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
652: use of the array after it has been restored. If you pass `NULL`, it will
653: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
655: Fortran Note:
656: .vb
657: PetscInt, pointer :: cols(:)
658: PetscScalar, pointer :: vals(:)
659: .ve
661: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
662: @*/
663: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
664: {
665: PetscFunctionBegin;
667: if (ncols) PetscAssertPointer(ncols, 3);
668: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
669: PetscTryTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
670: if (ncols) *ncols = 0;
671: if (cols) *cols = NULL;
672: if (vals) *vals = NULL;
673: PetscFunctionReturn(PETSC_SUCCESS);
674: }
676: /*@
677: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
678: You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.
680: Not Collective
682: Input Parameter:
683: . mat - the matrix
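   Example:
   An illustrative sketch; `A` is assumed to be a `MATSBAIJ` matrix and `row` a locally owned row index.
.vb
  PetscInt           ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;
  PetscCall(MatGetRowUpperTriangular(A));
  PetscCall(MatGetRow(A, row, &ncols, &cols, &vals));
  /* only the upper triangular part of the row is provided */
  PetscCall(MatRestoreRow(A, row, &ncols, &cols, &vals));
  PetscCall(MatRestoreRowUpperTriangular(A));
.ve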
685: Level: advanced
687: Note:
688: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for matrices in `MATSBAIJ` format.
690: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
691: @*/
692: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
693: {
694: PetscFunctionBegin;
697: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
698: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
699: MatCheckPreallocated(mat, 1);
700: PetscTryTypeMethod(mat, getrowuppertriangular);
701: PetscFunctionReturn(PETSC_SUCCESS);
702: }
704: /*@
705: MatRestoreRowUpperTriangular - Disables calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
707: Not Collective
709: Input Parameter:
710: . mat - the matrix
712: Level: advanced
714: Note:
715: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
717: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
718: @*/
719: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
720: {
721: PetscFunctionBegin;
724: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
725: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
726: MatCheckPreallocated(mat, 1);
727: PetscTryTypeMethod(mat, restorerowuppertriangular);
728: PetscFunctionReturn(PETSC_SUCCESS);
729: }
731: /*@
732: MatSetOptionsPrefix - Sets the prefix used for searching for all
733: `Mat` options in the database.
735: Logically Collective
737: Input Parameters:
738: + A - the matrix
739: - prefix - the prefix to prepend to all option names
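   Example:
   A minimal sketch; the prefix "sys1_" is an arbitrary illustration.
.vb
  PetscCall(MatSetOptionsPrefix(A, "sys1_"));
  PetscCall(MatSetFromOptions(A)); /* now responds to options such as -sys1_mat_type */
.ve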
741: Level: advanced
743: Notes:
744: A hyphen (-) must NOT be given at the beginning of the prefix name.
745: The first character of all runtime options is AUTOMATICALLY the hyphen.
747: This is NOT used for options for the factorization of the matrix. Normally the
748: prefix is automatically passed in from the PC calling the factorization. To set
749: it directly use `MatSetOptionsPrefixFactor()`
751: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
752: @*/
753: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
754: {
755: PetscFunctionBegin;
757: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
758: PetscTryMethod(A, "MatSetOptionsPrefix_C", (Mat, const char[]), (A, prefix));
759: PetscFunctionReturn(PETSC_SUCCESS);
760: }
762: /*@
763: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database
764: for matrices created with `MatGetFactor()`
766: Logically Collective
768: Input Parameters:
769: + A - the matrix
770: - prefix - the prefix to prepend to all option names for the factored matrix
772: Level: developer
774: Notes:
775: A hyphen (-) must NOT be given at the beginning of the prefix name.
776: The first character of all runtime options is AUTOMATICALLY the hyphen.
778: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
779: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
781: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
782: @*/
783: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
784: {
785: PetscFunctionBegin;
787: if (prefix) {
788: PetscAssertPointer(prefix, 2);
789: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
790: if (prefix != A->factorprefix) {
791: PetscCall(PetscFree(A->factorprefix));
792: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
793: }
794: } else PetscCall(PetscFree(A->factorprefix));
795: PetscFunctionReturn(PETSC_SUCCESS);
796: }
798: /*@
799: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database
800: for matrices created with `MatGetFactor()`
802: Logically Collective
804: Input Parameters:
805: + A - the matrix
806: - prefix - the prefix to prepend to all option names for the factored matrix
808: Level: developer
810: Notes:
811: A hyphen (-) must NOT be given at the beginning of the prefix name.
812: The first character of all runtime options is AUTOMATICALLY the hyphen.
814: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
815: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
817: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
818: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
819: `MatSetOptionsPrefix()`
820: @*/
821: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
822: {
823: size_t len1, len2, new_len;
825: PetscFunctionBegin;
827: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
828: if (!A->factorprefix) {
829: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
830: PetscFunctionReturn(PETSC_SUCCESS);
831: }
832: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
834: PetscCall(PetscStrlen(A->factorprefix, &len1));
835: PetscCall(PetscStrlen(prefix, &len2));
836: new_len = len1 + len2 + 1;
837: PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
838: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
839: PetscFunctionReturn(PETSC_SUCCESS);
840: }
842: /*@
843: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
844: matrix options in the database.
846: Logically Collective
848: Input Parameters:
849: + A - the matrix
850: - prefix - the prefix to prepend to all option names
852: Level: advanced
854: Note:
855: A hyphen (-) must NOT be given at the beginning of the prefix name.
856: The first character of all runtime options is AUTOMATICALLY the hyphen.
858: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
859: @*/
860: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
861: {
862: PetscFunctionBegin;
864: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
865: PetscTryMethod(A, "MatAppendOptionsPrefix_C", (Mat, const char[]), (A, prefix));
866: PetscFunctionReturn(PETSC_SUCCESS);
867: }
869: /*@
870: MatGetOptionsPrefix - Gets the prefix used for searching for all
871: matrix options in the database.
873: Not Collective
875: Input Parameter:
876: . A - the matrix
878: Output Parameter:
879: . prefix - pointer to the prefix string used
881: Level: advanced
883: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
884: @*/
885: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
886: {
887: PetscFunctionBegin;
889: PetscAssertPointer(prefix, 2);
890: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
891: PetscFunctionReturn(PETSC_SUCCESS);
892: }
894: /*@
895: MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`
897: Not Collective
899: Input Parameter:
900: . A - the matrix
902: Output Parameter:
903: . state - the object state
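   Example:
   A sketch of detecting whether a matrix has changed; `A` is assumed to be any `Mat`.
.vb
  PetscObjectState state, newstate;
  PetscCall(MatGetState(A, &state));
  /* ... code that may modify A ... */
  PetscCall(MatGetState(A, &newstate));
  if (newstate != state) {
    /* A changed; recompute anything derived from it */
  }
.ve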
905: Level: advanced
907: Note:
908: Object state is an integer which gets increased every time
909: the object is changed. By saving and later querying the object state
910: one can determine whether information about the object is still current.
912: See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.
914: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
915: @*/
916: PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
917: {
918: PetscFunctionBegin;
920: PetscAssertPointer(state, 2);
921: PetscCall(PetscObjectStateGet((PetscObject)A, state));
922: PetscFunctionReturn(PETSC_SUCCESS);
923: }
925: /*@
926: MatResetPreallocation - Reset matrix to use the original preallocation values provided by the user, for example with `MatXAIJSetPreallocation()`
928: Collective
930: Input Parameter:
931: . A - the matrix
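   Example:
   A minimal sketch for a preallocated and previously assembled `MATAIJ` matrix `A`; the entry (0,0) is an arbitrary illustration and is assumed to be locally owned.
.vb
  PetscCall(MatResetPreallocation(A));
  PetscCall(MatSetValue(A, 0, 0, 1.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve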
933: Level: beginner
935: Notes:
936: After calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`, the matrix data structures represent the nonzeros assigned to the
937: matrix. If that space is less than the preallocated space, the extra preallocated space is no longer available to take on new values. `MatResetPreallocation()`
938: makes all of the preallocated space available again.
940: Current values in the matrix are lost in this call
942: Currently only supported for `MATAIJ` matrices.
944: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
945: @*/
946: PetscErrorCode MatResetPreallocation(Mat A)
947: {
948: PetscFunctionBegin;
951: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
952: PetscFunctionReturn(PETSC_SUCCESS);
953: }
955: /*@
956: MatResetHash - Reset the matrix so that it will use a hash table for the next round of `MatSetValues()` and `MatAssemblyBegin()`/`MatAssemblyEnd()`.
958: Collective
960: Input Parameter:
961: . A - the matrix
963: Level: intermediate
965: Notes:
966: The matrix will again delete the hash table data structures after subsequent calls to `MatAssemblyBegin()`/`MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
968: Currently only supported for `MATAIJ` matrices.
970: .seealso: [](ch_matrices), `Mat`, `MatResetPreallocation()`
971: @*/
972: PetscErrorCode MatResetHash(Mat A)
973: {
974: PetscFunctionBegin;
977: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset to hash state after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
978: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
979: PetscUseMethod(A, "MatResetHash_C", (Mat), (A));
980: /* These flags are used to determine whether certain setups occur */
981: A->was_assembled = PETSC_FALSE;
982: A->assembled = PETSC_FALSE;
983: /* Log that the state of this object has changed; this will help guarantee that preconditioners get re-setup */
984: PetscCall(PetscObjectStateIncrease((PetscObject)A));
985: PetscFunctionReturn(PETSC_SUCCESS);
986: }
988: /*@
989: MatSetUp - Sets up the internal matrix data structures for later use by the matrix
991: Collective
993: Input Parameter:
994: . A - the matrix
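   Example:
   A typical creation sequence (the global sizes 100 x 100 are placeholders).
.vb
  Mat A;
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
.ve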
996: Level: advanced
998: Notes:
999: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
1000: setting values in the matrix.
1002: This routine is called internally by other `Mat` functions when needed, so it rarely needs to be called by users
1004: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
1005: @*/
1006: PetscErrorCode MatSetUp(Mat A)
1007: {
1008: PetscFunctionBegin;
1010: if (!((PetscObject)A)->type_name) {
1011: PetscMPIInt size;
1013: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1014: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
1015: }
1016: if (!A->preallocated) PetscTryTypeMethod(A, setup);
1017: PetscCall(PetscLayoutSetUp(A->rmap));
1018: PetscCall(PetscLayoutSetUp(A->cmap));
1019: A->preallocated = PETSC_TRUE;
1020: PetscFunctionReturn(PETSC_SUCCESS);
1021: }
1023: #if defined(PETSC_HAVE_SAWS)
1024: #include <petscviewersaws.h>
1025: #endif
1027: /*
1028: If thread safety is on, extraneous matrices may be printed
1030: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
1031: */
1032: #if !defined(PETSC_HAVE_THREADSAFETY)
1033: static PetscInt insidematview = 0;
1034: #endif
1036: /*@
1037: MatViewFromOptions - View properties of the matrix based on options set in the options database
1039: Collective
1041: Input Parameters:
1042: + A - the matrix
1043: . obj - optional additional object that provides the options prefix to use
1044: - name - command line option
1046: Options Database Key:
1047: . -mat_view [viewertype]:... - the viewer and its options
1049: Level: intermediate
1051: Note:
1052: .vb
1053: If no value is provided ascii:stdout is used
1054: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
1055: for example ascii::ascii_info prints just the information about the object not all details
1056: unless :append is given filename opens in write mode, overwriting what was already there
1057: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
1058: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
1059: socket[:port] defaults to the standard output port
1060: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
1061: .ve
1063: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1064: @*/
1065: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
1066: {
1067: PetscFunctionBegin;
1069: #if !defined(PETSC_HAVE_THREADSAFETY)
1070: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
1071: #endif
1072: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
1073: PetscFunctionReturn(PETSC_SUCCESS);
1074: }
1076: /*@
1077: MatView - display information about a matrix in a variety of ways
1079: Collective on viewer
1081: Input Parameters:
1082: + mat - the matrix
1083: - viewer - visualization context
1085: Options Database Keys:
1086: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1087: . -mat_view ::ascii_info_detail - Prints more detailed info
1088: . -mat_view - Prints matrix in ASCII format
1089: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1090: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1091: . -display <name> - Sets display name (default is host)
1092: . -draw_pause <sec> - Sets number of seconds to pause after display
1093: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1094: . -viewer_socket_machine <machine> - -
1095: . -viewer_socket_port <port> - -
1096: . -mat_view binary - save matrix to file in binary format
1097: - -viewer_binary_filename <name> - -
1099: Level: beginner
1101: Notes:
1102: The available visualization contexts include
1103: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1104: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1105: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1106: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1108: The user can open alternative visualization contexts with
1109: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1110: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a specified file; corresponding input uses `MatLoad()`
1111: . `PetscViewerDrawOpen()` - Outputs nonzero matrix nonzero structure to an X window display
1112: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer, `PETSCVIEWERSOCKET`. Only the `MATSEQDENSE` and `MATAIJ` types support this viewer.
1114: The user can call `PetscViewerPushFormat()` to specify the output
1115: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1116: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1117: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1118: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1119: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1120: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse format common among all matrix types
1121: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific format (which is in many cases the same as the default)
1122: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix size and structure (not the matrix entries)
1123: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about the matrix nonzero structure (still not vector or matrix entries)
1125: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1126: for larger matrices the program will seemingly hang and can take hours, so one should use the binary format instead.
1128: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1130: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1131: viewer is used.
1133: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1134: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1136: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1137: and then use the following mouse functions.
1138: .vb
1139: left mouse: zoom in
1140: middle mouse: zoom out
1141: right mouse: continue with the simulation
1142: .ve
1144: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1145: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1146: @*/
1147: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1148: {
1149: PetscInt rows, cols, rbs, cbs;
1150: PetscBool isascii, isstring, issaws;
1151: PetscViewerFormat format;
1152: PetscMPIInt size;
1154: PetscFunctionBegin;
1157: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1160: PetscCall(PetscViewerGetFormat(viewer, &format));
1161: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1162: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1164: #if !defined(PETSC_HAVE_THREADSAFETY)
1165: insidematview++;
1166: #endif
1167: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1168: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1169: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1170: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1172: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1173: if (isascii) {
1174: if (!mat->preallocated) {
1175: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1176: #if !defined(PETSC_HAVE_THREADSAFETY)
1177: insidematview--;
1178: #endif
1179: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1180: PetscFunctionReturn(PETSC_SUCCESS);
1181: }
1182: if (!mat->assembled) {
1183: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1184: #if !defined(PETSC_HAVE_THREADSAFETY)
1185: insidematview--;
1186: #endif
1187: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1188: PetscFunctionReturn(PETSC_SUCCESS);
1189: }
1190: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1191: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1192: MatNullSpace nullsp, transnullsp;
1194: PetscCall(PetscViewerASCIIPushTab(viewer));
1195: PetscCall(MatGetSize(mat, &rows, &cols));
1196: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1197: if (rbs != 1 || cbs != 1) {
1198: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1199: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1200: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1201: if (mat->factortype) {
1202: MatSolverType solver;
1203: PetscCall(MatFactorGetSolverType(mat, &solver));
1204: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1205: }
1206: if (mat->ops->getinfo) {
1207: PetscBool is_constant_or_diagonal;
1209: // Don't print nonzero information for constant or diagonal matrices, it just adds noise to the output
1210: PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &is_constant_or_diagonal, MATCONSTANTDIAGONAL, MATDIAGONAL, ""));
1211: if (!is_constant_or_diagonal) {
1212: MatInfo info;
1214: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1215: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1216: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1217: }
1218: }
1219: PetscCall(MatGetNullSpace(mat, &nullsp));
1220: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1221: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1222: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1223: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1224: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1225: PetscCall(PetscViewerASCIIPushTab(viewer));
1226: PetscCall(MatProductView(mat, viewer));
1227: PetscCall(PetscViewerASCIIPopTab(viewer));
1228: if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1229: IS tmp;
1231: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
1232: PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1233: PetscCall(PetscViewerASCIIPushTab(viewer));
1234: PetscCall(ISView(tmp, viewer));
1235: PetscCall(PetscViewerASCIIPopTab(viewer));
1236: PetscCall(ISDestroy(&tmp));
1237: }
1238: }
1239: } else if (issaws) {
1240: #if defined(PETSC_HAVE_SAWS)
1241: PetscMPIInt rank;
1243: PetscCall(PetscObjectName((PetscObject)mat));
1244: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1245: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1246: #endif
1247: } else if (isstring) {
1248: const char *type;
1249: PetscCall(MatGetType(mat, &type));
1250: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1251: PetscTryTypeMethod(mat, view, viewer);
1252: }
1253: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1254: PetscCall(PetscViewerASCIIPushTab(viewer));
1255: PetscUseTypeMethod(mat, viewnative, viewer);
1256: PetscCall(PetscViewerASCIIPopTab(viewer));
1257: } else if (mat->ops->view) {
1258: PetscCall(PetscViewerASCIIPushTab(viewer));
1259: PetscUseTypeMethod(mat, view, viewer);
1260: PetscCall(PetscViewerASCIIPopTab(viewer));
1261: }
1262: if (isascii) {
1263: PetscCall(PetscViewerGetFormat(viewer, &format));
1264: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1265: }
1266: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1267: #if !defined(PETSC_HAVE_THREADSAFETY)
1268: insidematview--;
1269: #endif
1270: PetscFunctionReturn(PETSC_SUCCESS);
1271: }
1273: #if defined(PETSC_USE_DEBUG)
1274: #include <../src/sys/totalview/tv_data_display.h>
1275: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1276: {
1277: TV_add_row("Local rows", "int", &mat->rmap->n);
1278: TV_add_row("Local columns", "int", &mat->cmap->n);
1279: TV_add_row("Global rows", "int", &mat->rmap->N);
1280: TV_add_row("Global columns", "int", &mat->cmap->N);
1281: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1282: return TV_format_OK;
1283: }
1284: #endif
1286: /*@
1287: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1288: with `MatView()`. The matrix format is determined from the options database.
1289: Generates a parallel MPI matrix if the communicator has more than one
1290: processor. The default matrix type is `MATAIJ`.
1292: Collective
1294: Input Parameters:
1295: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1296: or some related function before a call to `MatLoad()`
1297: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1299: Options Database Key:
1300: . -matload_block_size <bs> - set block size
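   Example:
   A minimal sketch of loading a matrix from a binary file; the file name "matrix.dat" is a placeholder.
.vb
  Mat         A;
  PetscViewer v;
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "matrix.dat", FILE_MODE_READ, &v));
  PetscCall(MatLoad(A, v));
  PetscCall(PetscViewerDestroy(&v));
.ve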
1302: Level: beginner
1304: Notes:
1305: If the `Mat` type has not yet been given then `MATAIJ` is used; call `MatSetFromOptions()` on the
1306: `Mat` before calling this routine if you wish to set it from the options database.
1308: `MatLoad()` automatically loads into the options database any options
1309: given in the file filename.info where filename is the name of the file
1310: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1311: file will be ignored if you use the -viewer_binary_skip_info option.
1313: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1314: sets the default matrix type AIJ and sets the local and global sizes.
1315: If type and/or size is already set, then the same are used.
1317: In parallel, each processor can load a subset of rows (or the
1318: entire matrix). This routine is especially useful when a large
1319: matrix is stored on disk and only part of it is desired on each
1320: processor. For example, a parallel solver may access only some of
1321: the rows from each processor. The algorithm used here reads
1322: relatively small blocks of data rather than reading the entire
1323: matrix and then subsetting it.
1325: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1326: Such a viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1327: or the sequence like
1328: .vb
1329: `PetscViewer` v;
1330: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1331: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1332: `PetscViewerSetFromOptions`(v);
1333: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1334: `PetscViewerFileSetName`(v,"datafile");
1335: .ve
1336: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1337: .vb
1338: -viewer_type {binary, hdf5}
1339: .ve
1341: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1342: and src/mat/tutorials/ex10.c with the second approach.
1344: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1345: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1346: Multiple objects, both matrices and vectors, can be stored within the same file.
1347: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1349: Most users should not need to know the details of the binary storage
1350: format, since `MatLoad()` and `MatView()` completely hide these details.
1351: But for anyone who is interested, the standard binary matrix storage
1352: format is
1354: .vb
1355: PetscInt MAT_FILE_CLASSID
1356: PetscInt number of rows
1357: PetscInt number of columns
1358: PetscInt total number of nonzeros
1359: PetscInt *number nonzeros in each row
1360: PetscInt *column indices of all nonzeros (starting index is zero)
1361: PetscScalar *values of all nonzeros
1362: .ve
1363: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1364: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1365: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1367: PETSc automatically does the byte swapping for
1368: machines that store the bytes reversed. Thus if you write your own binary
1369: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1370: and `PetscBinaryWrite()` to see how this may be done.
1372: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1373: Each processor's chunk is loaded independently by its owning MPI process.
1374: Multiple objects, both matrices and vectors, can be stored within the same file.
1375: They are looked up by their PetscObject name.
1377: As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1378: by default the same structure and naming of the AIJ arrays and column count
1379: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1380: .vb
1381: save example.mat A b -v7.3
1382: .ve
1383: can be directly read by this routine (see Reference 1 for details).
1385: Depending on your MATLAB version, this format might be the default;
1386: otherwise you can set it as the default in Preferences.
1388: Unless the -nocompression flag is used when saving the file in MATLAB,
1389: PETSc must be configured with the ZLIB package.
1391: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1393: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1395: The corresponding `MatView()` is not yet implemented.
1397: The loaded matrix is actually a transpose of the original one in MATLAB,
1398: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1399: With this format, the matrix is automatically transposed by PETSc,
1400: unless the matrix is marked as SPD or symmetric
1401: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1403: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1405: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1406: @*/
1407: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1408: {
1409: PetscBool flg;
1411: PetscFunctionBegin;
1415: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1417: flg = PETSC_FALSE;
1418: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1419: if (flg) {
1420: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1421: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1422: }
1423: flg = PETSC_FALSE;
1424: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1425: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1427: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1428: PetscUseTypeMethod(mat, load, viewer);
1429: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1430: PetscFunctionReturn(PETSC_SUCCESS);
1431: }
1433: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1434: {
1435: Mat_Redundant *redund = *redundant;
1437: PetscFunctionBegin;
1438: if (redund) {
1439: if (redund->matseq) { /* via MatCreateSubMatrices() */
1440: PetscCall(ISDestroy(&redund->isrow));
1441: PetscCall(ISDestroy(&redund->iscol));
1442: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1443: } else {
1444: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1445: PetscCall(PetscFree(redund->sbuf_j));
1446: PetscCall(PetscFree(redund->sbuf_a));
1447: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1448: PetscCall(PetscFree(redund->rbuf_j[i]));
1449: PetscCall(PetscFree(redund->rbuf_a[i]));
1450: }
1451: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1452: }
1454: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1455: PetscCall(PetscFree(redund));
1456: }
1457: PetscFunctionReturn(PETSC_SUCCESS);
1458: }
1460: /*@
1461: MatDestroy - Frees space taken by a matrix.
1463: Collective
1465: Input Parameter:
1466: . A - the matrix
1468: Level: beginner
1470: Developer Note:
1471: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1472: `MatDestroySubMatrices()`. Thus any changes made here must also be made in those routines.
1473: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and will likely need
1474: corresponding changes if changes are made here.
1476: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1477: @*/
1478: PetscErrorCode MatDestroy(Mat *A)
1479: {
1480: PetscFunctionBegin;
1481: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1483: if (--((PetscObject)*A)->refct > 0) {
1484: *A = NULL;
1485: PetscFunctionReturn(PETSC_SUCCESS);
1486: }
1488: /* if memory was published with SAWs then destroy it */
1489: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1490: PetscTryTypeMethod(*A, destroy);
1492: PetscCall(PetscFree((*A)->factorprefix));
1493: PetscCall(PetscFree((*A)->defaultvectype));
1494: PetscCall(PetscFree((*A)->defaultrandtype));
1495: PetscCall(PetscFree((*A)->bsizes));
1496: PetscCall(PetscFree((*A)->solvertype));
1497: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1498: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1499: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1500: PetscCall(MatProductClear(*A));
1501: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1502: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1503: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1504: PetscCall(MatDestroy(&(*A)->schur));
1505: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1506: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1507: PetscCall(PetscHeaderDestroy(A));
1508: PetscFunctionReturn(PETSC_SUCCESS);
1509: }
1511: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1512: /*@
1513: MatSetValues - Inserts or adds a block of values into a matrix.
1514: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1515: MUST be called after all calls to `MatSetValues()` have been completed.
1517: Not Collective
1519: Input Parameters:
1520: + mat - the matrix
1521: . m - the number of rows
1522: . idxm - the global indices of the rows
1523: . n - the number of columns
1524: . idxn - the global indices of the columns
1525: . v - a logically two-dimensional array of values
1526: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1528: Level: beginner
1530: Notes:
1531: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1533: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1534: options cannot be mixed without intervening calls to the assembly
1535: routines.
1537: `MatSetValues()` uses 0-based row and column numbers in Fortran
1538: as well as in C.
1540: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1541: simply ignored. This makes it easy to insert element stiffness matrices
1542: with homogeneous Dirichlet boundary conditions that you don't want represented
1543: in the matrix.
1545: Efficiency Alert:
1546: The routine `MatSetValuesBlocked()` may offer much better efficiency
1547: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1549: Fortran Notes:
1550: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1551: .vb
1552: call MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
1553: .ve
1555: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1557: Developer Note:
1558: This is labeled with C so it does not automatically generate Fortran stubs and interfaces,
1559: because it requires multiple Fortran interfaces depending on which arguments are scalars or arrays.
1561: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1562: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1563: @*/
1564: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1565: {
1566: PetscFunctionBeginHot;
1569: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1570: PetscAssertPointer(idxm, 3);
1571: PetscAssertPointer(idxn, 5);
1572: MatCheckPreallocated(mat, 1);
1574: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1575: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1577: if (PetscDefined(USE_DEBUG)) {
1578: PetscInt i, j;
1580: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1581: if (v) {
1582: for (i = 0; i < m; i++) {
1583: for (j = 0; j < n; j++) {
1584: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1585: #if defined(PETSC_USE_COMPLEX)
1586: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1587: #else
1588: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1589: #endif
1590: }
1591: }
1592: }
1593: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1594: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1595: }
1597: if (mat->assembled) {
1598: mat->was_assembled = PETSC_TRUE;
1599: mat->assembled = PETSC_FALSE;
1600: }
1601: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1602: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1603: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1604: PetscFunctionReturn(PETSC_SUCCESS);
1605: }
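
/*
  A minimal usage sketch (illustrative only, not part of matrix.c): inserting a 2x2 block of
  values with MatSetValues() and then assembling. The matrix A is assumed to have been created,
  sized, and preallocated (for example with MatSetUp()) by the caller.
*/
static PetscErrorCode MatSetValuesUsageSketch(Mat A)
{
  const PetscInt    rows[2] = {0, 1}, cols[2] = {0, 1};
  const PetscScalar vals[4] = {2.0, -1.0, -1.0, 2.0}; /* row-major: vals[i*n + j] goes to (rows[i], cols[j]) */

  PetscFunctionBegin;
  PetscCall(MatSetValues(A, 2, rows, 2, cols, vals, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}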
1607: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1608: /*@
1609: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns.
1610: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1611: MUST be called after all calls to `MatSetValues()` have been completed.
1613: Not Collective
1615: Input Parameters:
1616: + mat - the matrix
1617: . ism - the rows to provide
1618: . isn - the columns to provide
1619: . v - a logically two-dimensional array of values
1620: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1622: Level: beginner
1624: Notes:
1625: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1627: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1628: options cannot be mixed without intervening calls to the assembly
1629: routines.
1631: `MatSetValues()` uses 0-based row and column numbers in Fortran
1632: as well as in C.
1634: Negative indices may be passed in `ism` and `isn`; these rows and columns are
1635: simply ignored. This makes it easy to insert element stiffness matrices
1636: with homogeneous Dirichlet boundary conditions that you don't want represented
1637: in the matrix.
1639: Efficiency Alert:
1640: The routine `MatSetValuesBlocked()` may offer much better efficiency
1641: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1643: This is currently not optimized for any particular `ISType`
1645: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1646: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1647: @*/
1648: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1649: {
1650: PetscInt m, n;
1651: const PetscInt *rows, *cols;
1653: PetscFunctionBeginHot;
1655: PetscCall(ISGetIndices(ism, &rows));
1656: PetscCall(ISGetIndices(isn, &cols));
1657: PetscCall(ISGetLocalSize(ism, &m));
1658: PetscCall(ISGetLocalSize(isn, &n));
1659: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1660: PetscCall(ISRestoreIndices(ism, &rows));
1661: PetscCall(ISRestoreIndices(isn, &cols));
1662: PetscFunctionReturn(PETSC_SUCCESS);
1663: }
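
/*
  A minimal usage sketch (illustrative only, not part of matrix.c): the same kind of 2x2
  insertion expressed with index sets. ISCreateGeneral() is used here purely for illustration;
  any IS works, since the routine simply extracts the indices.
*/
static PetscErrorCode MatSetValuesISUsageSketch(Mat A)
{
  IS                ism, isn;
  const PetscInt    idx[2]  = {0, 1};
  const PetscScalar vals[4] = {2.0, -1.0, -1.0, 2.0};

  PetscFunctionBegin;
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, idx, PETSC_COPY_VALUES, &ism));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, idx, PETSC_COPY_VALUES, &isn));
  PetscCall(MatSetValuesIS(A, ism, isn, vals, INSERT_VALUES));
  PetscCall(ISDestroy(&ism));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}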
1665: /*@
1666: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1667: values into a matrix
1669: Not Collective
1671: Input Parameters:
1672: + mat - the matrix
1673: . row - the (block) row to set
1674: - v - a logically two-dimensional array of values
1676: Level: intermediate
1678: Notes:
1679: The values, `v`, are column-oriented (for the block version) and sorted
1681: All the nonzero values in `row` must be provided
1683: The matrix must have previously had its column indices set, likely by having been assembled.
1685: `row` must belong to this MPI process
1687: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1688: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1689: @*/
1690: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1691: {
1692: PetscInt globalrow;
1694: PetscFunctionBegin;
1697: PetscAssertPointer(v, 3);
1698: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1699: PetscCall(MatSetValuesRow(mat, globalrow, v));
1700: PetscFunctionReturn(PETSC_SUCCESS);
1701: }
1703: /*@
1704: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1705: values into a matrix
1707: Not Collective
1709: Input Parameters:
1710: + mat - the matrix
1711: . row - the (block) row to set
1712: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1714: Level: advanced
1716: Notes:
1717: The values, `v`, are column-oriented for the block version.
1719: All the nonzeros in `row` must be provided
1721: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED; usually `MatSetValues()` is used instead.
1723: `row` must belong to this process
1725: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1726: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1727: @*/
1728: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1729: {
1730: PetscFunctionBeginHot;
1733: MatCheckPreallocated(mat, 1);
1734: PetscAssertPointer(v, 3);
1735: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1736: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1737: mat->insertmode = INSERT_VALUES;
1739: if (mat->assembled) {
1740: mat->was_assembled = PETSC_TRUE;
1741: mat->assembled = PETSC_FALSE;
1742: }
1743: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1744: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1745: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1746: PetscFunctionReturn(PETSC_SUCCESS);
1747: }
1749: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1750: /*@
1751: MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1752: using structured grid indexing
1754: Not Collective
1756: Input Parameters:
1757: + mat - the matrix
1758: . m - number of rows being entered
1759: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1760: . n - number of columns being entered
1761: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1762: . v - a logically two-dimensional array of values
1763: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1765: Level: beginner
1767: Notes:
1768: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1770: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1771: options cannot be mixed without intervening calls to the assembly
1772: routines.
1774: The grid coordinates are across the entire grid, not just the local portion
1776: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1777: as well as in C.
1779: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1781: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1782: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1784: The columns and rows in the stencil passed in MUST be contained within the
1785: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1786: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1787: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1788: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1790: For periodic boundary conditions use negative indices for values to the left of index 0 (these are
1791: obtained by wrapping from the right edge), and use the last index plus one, plus two, etc. for values
1792: to the right of the last entry (these are obtained by wrapping from the left edge). This does not work for anything but the
1793: `DM_BOUNDARY_PERIODIC` boundary type.
1795: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1796: a single value per point), you can skip filling those indices.
1798: Inspired by the structured grid interface to the HYPRE package
1799: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1801: Efficiency Alert:
1802: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1803: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1805: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1806: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1807: @*/
1808: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1809: {
1810: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1811: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1812: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1814: PetscFunctionBegin;
1815: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1818: PetscAssertPointer(idxm, 3);
1819: PetscAssertPointer(idxn, 5);
1821: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1822: jdxm = buf;
1823: jdxn = buf + m;
1824: } else {
1825: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1826: jdxm = bufm;
1827: jdxn = bufn;
1828: }
1829: for (i = 0; i < m; i++) {
1830: for (j = 0; j < 3 - sdim; j++) dxm++;
1831: tmp = *dxm++ - starts[0];
1832: for (j = 0; j < dim - 1; j++) {
1833: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1834: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1835: }
1836: if (mat->stencil.noc) dxm++;
1837: jdxm[i] = tmp;
1838: }
1839: for (i = 0; i < n; i++) {
1840: for (j = 0; j < 3 - sdim; j++) dxn++;
1841: tmp = *dxn++ - starts[0];
1842: for (j = 0; j < dim - 1; j++) {
1843: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1844: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1845: }
1846: if (mat->stencil.noc) dxn++;
1847: jdxn[i] = tmp;
1848: }
1849: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1850: PetscCall(PetscFree2(bufm, bufn));
1851: PetscFunctionReturn(PETSC_SUCCESS);
1852: }
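
/*
  A minimal usage sketch (illustrative only, not part of matrix.c): inserting one row of a 2d
  5-point Laplacian stencil for a grid point (i,j). The matrix is assumed to come from
  DMCreateMatrix() on a 2d DMDA with one degree of freedom per node, and (i,j) is assumed to lie
  within this process's ghosted range. As noted above, the unused k and c stencil entries need
  not be filled in this case.
*/
static PetscErrorCode MatSetValuesStencilUsageSketch(Mat A, PetscInt i, PetscInt j)
{
  MatStencil  row, col[5];
  PetscScalar v[5] = {4.0, -1.0, -1.0, -1.0, -1.0};

  PetscFunctionBegin;
  row.i = i;
  row.j = j;
  col[0].i = i;     col[0].j = j;     /* diagonal entry */
  col[1].i = i - 1; col[1].j = j;
  col[2].i = i + 1; col[2].j = j;
  col[3].i = i;     col[3].j = j - 1;
  col[4].i = i;     col[4].j = j + 1;
  PetscCall(MatSetValuesStencil(A, 1, &row, 5, col, v, INSERT_VALUES));
  PetscFunctionReturn(PETSC_SUCCESS);
}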
1854: /*@
1855: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1856: using structured grid indexing
1858: Not Collective
1860: Input Parameters:
1861: + mat - the matrix
1862: . m - number of rows being entered
1863: . idxm - grid coordinates for matrix rows being entered
1864: . n - number of columns being entered
1865: . idxn - grid coordinates for matrix columns being entered
1866: . v - a logically two-dimensional array of values
1867: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1869: Level: beginner
1871: Notes:
1872: By default the values, `v`, are row-oriented and unsorted.
1873: See `MatSetOption()` for other options.
1875: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1876: options cannot be mixed without intervening calls to the assembly
1877: routines.
1879: The grid coordinates are across the entire grid, not just the local portion
1881: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1882: as well as in C.
1884: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1886: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1887: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1889: The columns and rows in the stencil passed in MUST be contained within the
1890: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1891: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1892: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1893: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1895: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1896: simply ignored. This makes it easy to insert element stiffness matrices
1897: with homogeneous Dirichlet boundary conditions that you don't want represented
1898: in the matrix.
1900: Inspired by the structured grid interface to the HYPRE package
1901: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1903: Fortran Note:
1904: `idxm` and `idxn` should be declared as
1905: .vb
1906: MatStencil idxm(4,m),idxn(4,n)
1907: .ve
1908: and the values inserted using
1909: .vb
1910: idxm(MatStencil_i,1) = i
1911: idxm(MatStencil_j,1) = j
1912: idxm(MatStencil_k,1) = k
1913: etc
1914: .ve
1916: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1917: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1918: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1919: @*/
1920: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1921: {
1922: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1923: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1924: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1926: PetscFunctionBegin;
1927: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1930: PetscAssertPointer(idxm, 3);
1931: PetscAssertPointer(idxn, 5);
1932: PetscAssertPointer(v, 6);
1934: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1935: jdxm = buf;
1936: jdxn = buf + m;
1937: } else {
1938: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1939: jdxm = bufm;
1940: jdxn = bufn;
1941: }
1942: for (i = 0; i < m; i++) {
1943: for (j = 0; j < 3 - sdim; j++) dxm++;
1944: tmp = *dxm++ - starts[0];
1945: for (j = 0; j < sdim - 1; j++) {
1946: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1947: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1948: }
1949: dxm++;
1950: jdxm[i] = tmp;
1951: }
1952: for (i = 0; i < n; i++) {
1953: for (j = 0; j < 3 - sdim; j++) dxn++;
1954: tmp = *dxn++ - starts[0];
1955: for (j = 0; j < sdim - 1; j++) {
1956: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1957: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1958: }
1959: dxn++;
1960: jdxn[i] = tmp;
1961: }
1962: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1963: PetscCall(PetscFree2(bufm, bufn));
1964: PetscFunctionReturn(PETSC_SUCCESS);
1965: }
1967: /*@
1968: MatSetStencil - Sets the grid information for setting values into a matrix via
1969: `MatSetValuesStencil()`
1971: Not Collective
1973: Input Parameters:
1974: + mat - the matrix
1975: . dim - dimension of the grid: 1, 2, or 3
1976: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1977: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1978: - dof - number of degrees of freedom per node
1980: Level: beginner
1982: Notes:
1983: Inspired by the structured grid interface to the HYPRE package
1984: (www.llnl.gov/CASC/hyper)
1986: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1987: user.
1989: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1990: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1991: @*/
1992: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1993: {
1994: PetscFunctionBegin;
1996: PetscAssertPointer(dims, 3);
1997: PetscAssertPointer(starts, 4);
1999: mat->stencil.dim = dim + (dof > 1);
2000: for (PetscInt i = 0; i < dim; i++) {
2001: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
2002: mat->stencil.starts[i] = starts[dim - i - 1];
2003: }
2004: mat->stencil.dims[dim] = dof;
2005: mat->stencil.starts[dim] = 0;
2006: mat->stencil.noc = (PetscBool)(dof == 1);
2007: PetscFunctionReturn(PETSC_SUCCESS);
2008: }
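
/*
  A minimal usage sketch (illustrative only, not part of matrix.c): setting stencil information
  by hand for a 2d grid with one degree of freedom per node. The ghost-region sizes gxs, gys,
  gxm, gym are assumed to come from, e.g., DMDAGetGhostCorners(); together with
  MatSetLocalToGlobalMapping() this enables MatSetValuesStencil(). For matrices obtained from
  DMCreateMatrix() all of this is done automatically.
*/
static PetscErrorCode MatSetStencilUsageSketch(Mat A, PetscInt gxs, PetscInt gys, PetscInt gxm, PetscInt gym)
{
  PetscInt dims[2], starts[2];

  PetscFunctionBegin;
  dims[0]   = gxm; /* number of ghosted grid points in x */
  dims[1]   = gym; /* number of ghosted grid points in y */
  starts[0] = gxs; /* first ghosted index in x */
  starts[1] = gys; /* first ghosted index in y */
  PetscCall(MatSetStencil(A, 2, dims, starts, 1));
  PetscFunctionReturn(PETSC_SUCCESS);
}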
2010: /*@
2011: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
2013: Not Collective
2015: Input Parameters:
2016: + mat - the matrix
2017: . m - the number of block rows
2018: . idxm - the global block indices
2019: . n - the number of block columns
2020: . idxn - the global block indices
2021: . v - a logically two-dimensional array of values
2022: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
2024: Level: intermediate
2026: Notes:
2027: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
2028: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
2030: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
2031: NOT the total number of rows/columns; for example, if the block size is 2 and
2032: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
2033: The values in `idxm` would be 1 2; that is the first index for each block divided by
2034: the block size.
2036: You must call `MatSetBlockSize()` when constructing this matrix (before
2037: preallocating it).
2039: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2041: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
2042: options cannot be mixed without intervening calls to the assembly
2043: routines.
2045: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
2046: as well as in C.
2048: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
2049: simply ignored. This makes it easy to insert element stiffness matrices
2050: with homogeneous Dirichlet boundary conditions that you don't want represented
2051: in the matrix.
2053: Each time an entry is set within a sparse matrix via `MatSetValues()`,
2054: internal searching must be done to determine where to place the
2055: data in the matrix storage space. By instead inserting blocks of
2056: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2057: reduced.
2059: Example:
2060: .vb
2061: Suppose m=n=2 and block size (bs) = 2. The array is
2063: 1 2 | 3 4
2064: 5 6 | 7 8
2065: - - - | - - -
2066: 9 10 | 11 12
2067: 13 14 | 15 16
2069: v[] should be passed in like
2070: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
2072: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2073: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2074: .ve
2076: Fortran Notes:
2077: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
2078: .vb
2079: call MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
2080: .ve
2082: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2084: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2085: @*/
2086: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
2087: {
2088: PetscFunctionBeginHot;
2091: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2092: PetscAssertPointer(idxm, 3);
2093: PetscAssertPointer(idxn, 5);
2094: MatCheckPreallocated(mat, 1);
2095: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2096: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2097: if (PetscDefined(USE_DEBUG)) {
2098: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2099: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2100: }
2101: if (PetscDefined(USE_DEBUG)) {
2102: PetscInt rbs, cbs, M, N, i;
2103: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2104: PetscCall(MatGetSize(mat, &M, &N));
2105: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
2106: for (i = 0; i < n; i++)
2107: PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
2108: }
2109: if (mat->assembled) {
2110: mat->was_assembled = PETSC_TRUE;
2111: mat->assembled = PETSC_FALSE;
2112: }
2113: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2114: if (mat->ops->setvaluesblocked) {
2115: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2116: } else {
2117: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2118: PetscInt i, j, bs, cbs;
2120: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2121: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2122: iidxm = buf;
2123: iidxn = buf + m * bs;
2124: } else {
2125: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2126: iidxm = bufr;
2127: iidxn = bufc;
2128: }
2129: for (i = 0; i < m; i++) {
2130: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2131: }
2132: if (m != n || bs != cbs || idxm != idxn) {
2133: for (i = 0; i < n; i++) {
2134: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2135: }
2136: } else iidxn = iidxm;
2137: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2138: PetscCall(PetscFree2(bufr, bufc));
2139: }
2140: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2141: PetscFunctionReturn(PETSC_SUCCESS);
2142: }
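
/*
  A minimal usage sketch (illustrative only, not part of matrix.c): a block insertion of the kind
  described in the example above, written out in C. The matrix A is assumed to have block size 2
  and to have been preallocated by the caller; block indices 0 and 1 here fill point rows and
  columns 0 through 3.
*/
static PetscErrorCode MatSetValuesBlockedUsageSketch(Mat A)
{
  const PetscInt    idxm[2] = {0, 1}, idxn[2] = {0, 1}; /* block indices, not point indices */
  const PetscScalar v[16]   = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; /* row-major */

  PetscFunctionBegin;
  PetscCall(MatSetValuesBlocked(A, 2, idxm, 2, idxn, v, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}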
2144: /*@
2145: MatGetValues - Gets a block of local values from a matrix.
2147: Not Collective; can only return values that are owned by the given process
2149: Input Parameters:
2150: + mat - the matrix
2151: . m - the number of rows
2152: . idxm - the global indices of the rows
2153: . n - the number of columns
2154: . idxn - the global indices of the columns
2155: - v - a logically two-dimensional array for storing the values
2157: Level: advanced
2159: Notes:
2160: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2161: The values, `v`, are then returned in a row-oriented format,
2162: analogous to that used by default in `MatSetValues()`.
2164: `MatGetValues()` uses 0-based row and column numbers in
2165: Fortran as well as in C.
2167: `MatGetValues()` requires that the matrix has been assembled
2168: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2169: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2170: without intermediate matrix assembly.
2172: Negative row or column indices will be ignored and those locations in `v` will be
2173: left unchanged.
2175: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2176: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2177: from `MatGetOwnershipRange`(mat,&rstart,&rend).
2179: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2180: @*/
2181: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2182: {
2183: PetscFunctionBegin;
2186: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2187: PetscAssertPointer(idxm, 3);
2188: PetscAssertPointer(idxn, 5);
2189: PetscAssertPointer(v, 6);
2190: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2191: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2192: MatCheckPreallocated(mat, 1);
2194: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2195: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2196: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2197: PetscFunctionReturn(PETSC_SUCCESS);
2198: }
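
/*
  A minimal usage sketch (illustrative only, not part of matrix.c): reading back a 2x2 block of
  locally owned entries after assembly. The requested rows must lie in [rstart, rend) as returned
  by MatGetOwnershipRange(); this sketch assumes the process owns at least two rows and that the
  matrix has at least two columns.
*/
static PetscErrorCode MatGetValuesUsageSketch(Mat A)
{
  PetscInt    rstart, rend, rows[2], cols[2] = {0, 1};
  PetscScalar vals[4]; /* filled in row-major order */

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  rows[0] = rstart;
  rows[1] = rstart + 1;
  PetscCall(MatGetValues(A, 2, rows, 2, cols, vals));
  PetscFunctionReturn(PETSC_SUCCESS);
}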
2200: /*@
2201: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2202: defined previously by `MatSetLocalToGlobalMapping()`
2204: Not Collective
2206: Input Parameters:
2207: + mat - the matrix
2208: . nrow - number of rows
2209: . irow - the row local indices
2210: . ncol - number of columns
2211: - icol - the column local indices
2213: Output Parameter:
2214: . y - a logically two-dimensional array of values
2216: Level: advanced
2218: Notes:
2219: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2221: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2222: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2223: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2224: with `MatSetLocalToGlobalMapping()`.
2226: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2227: `MatSetValuesLocal()`, `MatGetValues()`
2228: @*/
2229: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2230: {
2231: PetscFunctionBeginHot;
2234: MatCheckPreallocated(mat, 1);
2235: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2236: PetscAssertPointer(irow, 3);
2237: PetscAssertPointer(icol, 5);
2238: if (PetscDefined(USE_DEBUG)) {
2239: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2240: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2241: }
2242: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2243: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2244: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2245: else {
2246: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2247: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2248: irowm = buf;
2249: icolm = buf + nrow;
2250: } else {
2251: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2252: irowm = bufr;
2253: icolm = bufc;
2254: }
2255: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2256: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2257: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2258: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2259: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2260: PetscCall(PetscFree2(bufr, bufc));
2261: }
2262: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2263: PetscFunctionReturn(PETSC_SUCCESS);
2264: }
2266: /*@
2267: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2268: the same size. Currently, this can only be called once and creates the given matrix.
2270: Not Collective
2272: Input Parameters:
2273: + mat - the matrix
2274: . nb - the number of blocks
2275: . bs - the number of rows (and columns) in each block
2276: . rows - a concatenation of the rows for each block
2277: - v - a concatenation of logically two-dimensional arrays of values
2279: Level: advanced
2281: Notes:
2282: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2284: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
2286: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2287: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2288: @*/
2289: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2290: {
2291: PetscFunctionBegin;
2294: PetscAssertPointer(rows, 4);
2295: PetscAssertPointer(v, 5);
2296: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2298: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2299: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2300: else {
2301: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2302: }
2303: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2304: PetscFunctionReturn(PETSC_SUCCESS);
2305: }
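
/*
  A minimal usage sketch (illustrative only, not part of matrix.c): adding two square 2x2 blocks
  in one call. `rows` concatenates the row indices of the blocks and `v` concatenates their
  row-major entries; values are accumulated with ADD_VALUES.
*/
static PetscErrorCode MatSetValuesBatchUsageSketch(Mat A)
{
  PetscInt          rows[4] = {0, 1, 2, 3};                                /* block 0: rows 0,1; block 1: rows 2,3 */
  const PetscScalar v[8]    = {1.0, -1.0, -1.0, 1.0, 2.0, -2.0, -2.0, 2.0};

  PetscFunctionBegin;
  PetscCall(MatSetValuesBatch(A, 2, 2, rows, v));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}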
2307: /*@
2308: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2309: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2310: using a local (per-processor) numbering.
2312: Not Collective
2314: Input Parameters:
2315: + x - the matrix
2316: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2317: - cmapping - column mapping
2319: Level: intermediate
2321: Note:
2322: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
2324: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2325: @*/
2326: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2327: {
2328: PetscFunctionBegin;
2333: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2334: else {
2335: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2336: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2337: }
2338: PetscFunctionReturn(PETSC_SUCCESS);
2339: }
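
/*
  A minimal usage sketch (illustrative only, not part of matrix.c): building a local-to-global
  mapping from an array of global indices (the hypothetical ghosted index list `ltog` of length
  `nlocal`) and attaching it to the matrix so that MatSetValuesLocal() can be used.
*/
static PetscErrorCode MatSetLocalToGlobalMappingUsageSketch(Mat A, PetscInt nlocal, const PetscInt ltog[])
{
  ISLocalToGlobalMapping map;

  PetscFunctionBegin;
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, nlocal, ltog, PETSC_COPY_VALUES, &map));
  PetscCall(MatSetLocalToGlobalMapping(A, map, map)); /* same mapping for rows and columns */
  PetscCall(ISLocalToGlobalMappingDestroy(&map));     /* the matrix keeps its own reference */
  PetscFunctionReturn(PETSC_SUCCESS);
}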
2341: /*@
2342: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2344: Not Collective
2346: Input Parameter:
2347: . A - the matrix
2349: Output Parameters:
2350: + rmapping - row mapping
2351: - cmapping - column mapping
2353: Level: advanced
2355: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2356: @*/
2357: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2358: {
2359: PetscFunctionBegin;
2362: if (rmapping) {
2363: PetscAssertPointer(rmapping, 2);
2364: *rmapping = A->rmap->mapping;
2365: }
2366: if (cmapping) {
2367: PetscAssertPointer(cmapping, 3);
2368: *cmapping = A->cmap->mapping;
2369: }
2370: PetscFunctionReturn(PETSC_SUCCESS);
2371: }
2373: /*@
2374: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2376: Logically Collective
2378: Input Parameters:
2379: + A - the matrix
2380: . rmap - row layout
2381: - cmap - column layout
2383: Level: advanced
2385: Note:
2386: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2388: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2389: @*/
2390: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2391: {
2392: PetscFunctionBegin;
2394: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2395: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2396: PetscFunctionReturn(PETSC_SUCCESS);
2397: }
2399: /*@
2400: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2402: Not Collective
2404: Input Parameter:
2405: . A - the matrix
2407: Output Parameters:
2408: + rmap - row layout
2409: - cmap - column layout
2411: Level: advanced
2413: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2414: @*/
2415: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2416: {
2417: PetscFunctionBegin;
2420: if (rmap) {
2421: PetscAssertPointer(rmap, 2);
2422: *rmap = A->rmap;
2423: }
2424: if (cmap) {
2425: PetscAssertPointer(cmap, 3);
2426: *cmap = A->cmap;
2427: }
2428: PetscFunctionReturn(PETSC_SUCCESS);
2429: }
2431: /*@
2432: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2433: using a local numbering of the rows and columns.
2435: Not Collective
2437: Input Parameters:
2438: + mat - the matrix
2439: . nrow - number of rows
2440: . irow - the row local indices
2441: . ncol - number of columns
2442: . icol - the column local indices
2443: . y - a logically two-dimensional array of values
2444: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2446: Level: intermediate
2448: Notes:
2449: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2451: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2452: options cannot be mixed without intervening calls to the assembly
2453: routines.
2455: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2456: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
2458: Fortran Notes:
2459: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2460: .vb
2461: call MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2462: .ve
2464: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2466: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2467: `MatGetValuesLocal()`
2468: @*/
2469: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2470: {
2471: PetscFunctionBeginHot;
2474: MatCheckPreallocated(mat, 1);
2475: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2476: PetscAssertPointer(irow, 3);
2477: PetscAssertPointer(icol, 5);
2478: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2479: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2480: if (PetscDefined(USE_DEBUG)) {
2481: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2482: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2483: }
2485: if (mat->assembled) {
2486: mat->was_assembled = PETSC_TRUE;
2487: mat->assembled = PETSC_FALSE;
2488: }
2489: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2490: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2491: else {
2492: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2493: const PetscInt *irowm, *icolm;
2495: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2496: bufr = buf;
2497: bufc = buf + nrow;
2498: irowm = bufr;
2499: icolm = bufc;
2500: } else {
2501: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2502: irowm = bufr;
2503: icolm = bufc;
2504: }
2505: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2506: else irowm = irow;
2507: if (mat->cmap->mapping) {
2508: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2509: else icolm = irowm;
2510: } else icolm = icol;
2511: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2512: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2513: }
2514: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2515: PetscFunctionReturn(PETSC_SUCCESS);
2516: }
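
/*
  A minimal usage sketch (illustrative only, not part of matrix.c): adding a 2x2 element
  contribution using local indices. A local-to-global mapping is assumed to have been attached
  with MatSetLocalToGlobalMapping() (or implicitly by DMCreateMatrix()).
*/
static PetscErrorCode MatSetValuesLocalUsageSketch(Mat A)
{
  const PetscInt    lrows[2] = {0, 1}, lcols[2] = {0, 1}; /* local numbering */
  const PetscScalar ke[4]    = {1.0, -1.0, -1.0, 1.0};    /* element "stiffness" block, row-major */

  PetscFunctionBegin;
  PetscCall(MatSetValuesLocal(A, 2, lrows, 2, lcols, ke, ADD_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}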
2518: /*@
2519: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2520: using a local ordering of the nodes a block at a time.
2522: Not Collective
2524: Input Parameters:
2525: + mat - the matrix
2526: . nrow - number of rows
2527: . irow - the row local indices
2528: . ncol - number of columns
2529: . icol - the column local indices
2530: . y - a logically two-dimensional array of values
2531: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2533: Level: intermediate
2535: Notes:
2536: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2537: before using this routine.
2539: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2540: options cannot be mixed without intervening calls to the assembly
2541: routines.
2543: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2544: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2546: Fortran Notes:
2547: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2548: .vb
2549: call MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2550: .ve
2552: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2554: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2555: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2556: @*/
2557: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2558: {
2559: PetscFunctionBeginHot;
2562: MatCheckPreallocated(mat, 1);
2563: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2564: PetscAssertPointer(irow, 3);
2565: PetscAssertPointer(icol, 5);
2566: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2567: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2568: if (PetscDefined(USE_DEBUG)) {
2569: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2570: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2571: }
2573: if (mat->assembled) {
2574: mat->was_assembled = PETSC_TRUE;
2575: mat->assembled = PETSC_FALSE;
2576: }
2577: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2578: PetscInt irbs, rbs;
2579: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2580: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2581: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2582: }
2583: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2584: PetscInt icbs, cbs;
2585: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2586: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2587: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2588: }
2589: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2590: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2591: else {
2592: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2593: const PetscInt *irowm, *icolm;
2595: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2596: bufr = buf;
2597: bufc = buf + nrow;
2598: irowm = bufr;
2599: icolm = bufc;
2600: } else {
2601: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2602: irowm = bufr;
2603: icolm = bufc;
2604: }
2605: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2606: else irowm = irow;
2607: if (mat->cmap->mapping) {
2608: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2609: else icolm = irowm;
2610: } else icolm = icol;
2611: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2612: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2613: }
2614: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2615: PetscFunctionReturn(PETSC_SUCCESS);
2616: }
2618: /*@
2619: MatMultDiagonalBlock - Computes the matrix-vector product $y = Dx$, where `D` is defined by the inode or block structure of the diagonal
2621: Collective
2623: Input Parameters:
2624: + mat - the matrix
2625: - x - the vector to be multiplied
2627: Output Parameter:
2628: . y - the result
2630: Level: developer
2632: Note:
2633: The vectors `x` and `y` cannot be the same. I.e., one cannot
2634: call `MatMultDiagonalBlock`(A,y,y).
2636: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2637: @*/
2638: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2639: {
2640: PetscFunctionBegin;
2646: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2647: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2648: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2649: MatCheckPreallocated(mat, 1);
2651: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2652: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2653: PetscFunctionReturn(PETSC_SUCCESS);
2654: }
2656: /*@
2657: MatMult - Computes the matrix-vector product, $y = Ax$.
2659: Neighbor-wise Collective
2661: Input Parameters:
2662: + mat - the matrix
2663: - x - the vector to be multiplied
2665: Output Parameter:
2666: . y - the result
2668: Level: beginner
2670: Note:
2671: The vectors `x` and `y` cannot be the same. I.e., one cannot
2672: call `MatMult`(A,y,y).
2674: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2675: @*/
2676: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2677: {
2678: PetscFunctionBegin;
2682: VecCheckAssembled(x);
2684: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2685: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2686: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2687: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2688: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2689: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2690: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2691: PetscCall(VecSetErrorIfLocked(y, 3));
2692: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2693: MatCheckPreallocated(mat, 1);
2695: PetscCall(VecLockReadPush(x));
2696: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2697: PetscUseTypeMethod(mat, mult, x, y);
2698: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2699: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2700: PetscCall(VecLockReadPop(x));
2701: PetscFunctionReturn(PETSC_SUCCESS);
2702: }
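
/*
  A minimal usage sketch (illustrative only, not part of matrix.c): creating compatible vectors
  with MatCreateVecs() and forming y = A x for an assembled matrix A.
*/
static PetscErrorCode MatMultUsageSketch(Mat A)
{
  Vec x, y;

  PetscFunctionBegin;
  PetscCall(MatCreateVecs(A, &x, &y)); /* x is compatible with the columns, y with the rows */
  PetscCall(VecSet(x, 1.0));
  PetscCall(MatMult(A, x, y));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscFunctionReturn(PETSC_SUCCESS);
}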
2704: /*@
2705: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2707: Neighbor-wise Collective
2709: Input Parameters:
2710: + mat - the matrix
2711: - x - the vector to be multiplied
2713: Output Parameter:
2714: . y - the result
2716: Level: beginner
2718: Notes:
2719: The vectors `x` and `y` cannot be the same. I.e., one cannot
2720: call `MatMultTranspose`(A,y,y).
2722: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2723: use `MatMultHermitianTranspose()` for that
2725: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2726: @*/
2727: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2728: {
2729: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2731: PetscFunctionBegin;
2735: VecCheckAssembled(x);
2738: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2739: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2740: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2741: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2742: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2743: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2744: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2745: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2746: MatCheckPreallocated(mat, 1);
2748: if (!mat->ops->multtranspose) {
2749: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2750: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2751: } else op = mat->ops->multtranspose;
2752: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2753: PetscCall(VecLockReadPush(x));
2754: PetscCall((*op)(mat, x, y));
2755: PetscCall(VecLockReadPop(x));
2756: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2757: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2758: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2759: PetscFunctionReturn(PETSC_SUCCESS);
2760: }
2762: /*@
2763: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2765: Neighbor-wise Collective
2767: Input Parameters:
2768: + mat - the matrix
2769: - x - the vector to be multiplied
2771: Output Parameter:
2772: . y - the result
2774: Level: beginner
2776: Notes:
2777: The vectors `x` and `y` cannot be the same. I.e., one cannot
2778: call `MatMultHermitianTranspose`(A,y,y).
2780: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2782: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2784: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2785: @*/
2786: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2787: {
2788: PetscFunctionBegin;
2794: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2795: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2796: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2797: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2798: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2799: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2800: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2801: MatCheckPreallocated(mat, 1);
2803: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2804: #if defined(PETSC_USE_COMPLEX)
2805: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2806: PetscCall(VecLockReadPush(x));
2807: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2808: else PetscUseTypeMethod(mat, mult, x, y);
2809: PetscCall(VecLockReadPop(x));
2810: } else {
2811: Vec w;
2812: PetscCall(VecDuplicate(x, &w));
2813: PetscCall(VecCopy(x, w));
2814: PetscCall(VecConjugate(w));
2815: PetscCall(MatMultTranspose(mat, w, y));
2816: PetscCall(VecDestroy(&w));
2817: PetscCall(VecConjugate(y));
2818: }
2819: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2820: #else
2821: PetscCall(MatMultTranspose(mat, x, y));
2822: #endif
2823: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2824: PetscFunctionReturn(PETSC_SUCCESS);
2825: }
2827: /*@
2828: MatMultAdd - Computes $v3 = v2 + A * v1$.
2830: Neighbor-wise Collective
2832: Input Parameters:
2833: + mat - the matrix
2834: . v1 - the vector to be multiplied by `mat`
2835: - v2 - the vector to be added to the result
2837: Output Parameter:
2838: . v3 - the result
2840: Level: beginner
2842: Note:
2843: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2844: call `MatMultAdd`(A,v1,v2,v1).
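   Example:
   A minimal sketch, assuming `A` is an assembled matrix; the vectors are created to conform to its layout
.vb
   Vec v1, v2, v3;
   PetscCall(MatCreateVecs(A, &v1, &v2)); /* v1 conforms to the columns of A, v2 to its rows */
   PetscCall(VecDuplicate(v2, &v3));
   PetscCall(VecSet(v1, 1.0));
   PetscCall(VecSet(v2, 2.0));
   PetscCall(MatMultAdd(A, v1, v2, v3)); /* v3 = v2 + A*v1 */
.ve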
2846: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2847: @*/
2848: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2849: {
2850: PetscFunctionBegin;
2857: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2858: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2859: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2860: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2861: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2862: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2863: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2864: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2865: MatCheckPreallocated(mat, 1);
2867: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2868: PetscCall(VecLockReadPush(v1));
2869: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2870: PetscCall(VecLockReadPop(v1));
2871: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2872: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2873: PetscFunctionReturn(PETSC_SUCCESS);
2874: }
2876: /*@
2877: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2879: Neighbor-wise Collective
2881: Input Parameters:
2882: + mat - the matrix
2883: . v1 - the vector to be multiplied by the transpose of the matrix
2884: - v2 - the vector to be added to the result
2886: Output Parameter:
2887: . v3 - the result
2889: Level: beginner
2891: Note:
2892: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2893: call `MatMultTransposeAdd`(A,v1,v2,v1).
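   Example:
   A minimal sketch, assuming `A` is an assembled matrix and `v1`, `v2`, `v3` are conforming vectors (the layouts of `MatMultAdd()` with rows and columns swapped)
.vb
   PetscCall(MatMultTransposeAdd(A, v1, v2, v3)); /* v3 = v2 + A^T*v1 */
.ve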
2895: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2896: @*/
2897: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2898: {
2899: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2901: PetscFunctionBegin;
2908: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2909: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2910: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2911: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2912: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2913: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2914: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2915: MatCheckPreallocated(mat, 1);
2917: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2918: PetscCall(VecLockReadPush(v1));
2919: PetscCall((*op)(mat, v1, v2, v3));
2920: PetscCall(VecLockReadPop(v1));
2921: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2922: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2923: PetscFunctionReturn(PETSC_SUCCESS);
2924: }
2926: /*@
2927: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2929: Neighbor-wise Collective
2931: Input Parameters:
2932: + mat - the matrix
2933: . v1 - the vector to be multiplied by the Hermitian transpose
2934: - v2 - the vector to be added to the result
2936: Output Parameter:
2937: . v3 - the result
2939: Level: beginner
2941: Note:
2942: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2943: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2945: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2946: @*/
2947: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2948: {
2949: PetscFunctionBegin;
2956: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2957: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2958: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2959: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2960: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2961: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2962: MatCheckPreallocated(mat, 1);
2964: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2965: PetscCall(VecLockReadPush(v1));
2966: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2967: else {
2968: Vec w, z;
2969: PetscCall(VecDuplicate(v1, &w));
2970: PetscCall(VecCopy(v1, w));
2971: PetscCall(VecConjugate(w));
2972: PetscCall(VecDuplicate(v3, &z));
2973: PetscCall(MatMultTranspose(mat, w, z));
2974: PetscCall(VecDestroy(&w));
2975: PetscCall(VecConjugate(z));
2976: if (v2 != v3) {
2977: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2978: } else {
2979: PetscCall(VecAXPY(v3, 1.0, z));
2980: }
2981: PetscCall(VecDestroy(&z));
2982: }
2983: PetscCall(VecLockReadPop(v1));
2984: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2985: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2986: PetscFunctionReturn(PETSC_SUCCESS);
2987: }
2989: /*@
2990: MatGetFactorType - gets the type of factorization that a matrix represents
2992: Not Collective
2994: Input Parameter:
2995: . mat - the matrix
2997: Output Parameter:
2998: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3000: Level: intermediate
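   Example:
   A minimal sketch that checks whether a matrix already holds factors
.vb
   MatFactorType ftype;
   PetscCall(MatGetFactorType(A, &ftype));
   if (ftype == MAT_FACTOR_NONE) {
     /* A is an ordinary (unfactored) matrix */
   }
.ve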
3002: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3003: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3004: @*/
3005: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
3006: {
3007: PetscFunctionBegin;
3010: PetscAssertPointer(t, 2);
3011: *t = mat->factortype;
3012: PetscFunctionReturn(PETSC_SUCCESS);
3013: }
3015: /*@
3016: MatSetFactorType - sets the type of factorization that a matrix represents
3018: Logically Collective
3020: Input Parameters:
3021: + mat - the matrix
3022: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3024: Level: intermediate
3026: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3027: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3028: @*/
3029: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
3030: {
3031: PetscFunctionBegin;
3034: mat->factortype = t;
3035: PetscFunctionReturn(PETSC_SUCCESS);
3036: }
3038: /*@
3039: MatGetInfo - Returns information about matrix storage (number of
3040: nonzeros, memory, etc.).
3042: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3044: Input Parameters:
3045: + mat - the matrix
3046: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3048: Output Parameter:
3049: . info - matrix information context
3051: Options Database Key:
3052: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3054: Level: intermediate
3056: Notes:
3057: The `MatInfo` context contains a variety of matrix data, including
3058: number of nonzeros allocated and used, number of mallocs during
3059: matrix assembly, etc. Additional information for factored matrices
3060: is provided (such as the fill ratio, number of mallocs during
3061: factorization, etc.).
3063: Example:
3064: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3065: data within the `MatInfo` context. For example,
3066: .vb
3067: MatInfo info;
3068: Mat A;
3069: double mal, nz_a, nz_u;
3071: MatGetInfo(A, MAT_LOCAL, &info);
3072: mal = info.mallocs;
3073: nz_a = info.nz_allocated;
3074: .ve
3076: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3077: @*/
3078: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3079: {
3080: PetscFunctionBegin;
3083: PetscAssertPointer(info, 3);
3084: MatCheckPreallocated(mat, 1);
3085: PetscUseTypeMethod(mat, getinfo, flag, info);
3086: PetscFunctionReturn(PETSC_SUCCESS);
3087: }
3089: /*
3090: This is used by external packages where it is not easy to get the info from the actual
3091: matrix factorization.
3092: */
3093: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3094: {
3095: PetscFunctionBegin;
3096: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3097: PetscFunctionReturn(PETSC_SUCCESS);
3098: }
3100: /*@
3101: MatLUFactor - Performs in-place LU factorization of a matrix.
3103: Collective
3105: Input Parameters:
3106: + mat - the matrix
3107: . row - row permutation
3108: . col - column permutation
3109: - info - options for factorization, includes
3110: .vb
3111: fill - expected fill as ratio of original fill.
3112: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3113: Run with the option -info to determine an optimal value to use
3114: .ve
3116: Level: developer
3118: Notes:
3119: Most users should employ the `KSP` interface for linear solvers
3120: instead of working directly with matrix algebra routines such as this.
3121: See, e.g., `KSPCreate()`.
3123: This changes the state of the matrix to a factored matrix; it cannot be used
3124: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3126: This is truly in-place only for dense matrices; when not using `KSP`, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and
3127: `MatLUFactorNumeric()`.
3129: Fortran Note:
3130: A valid (non-null) `info` argument must be provided
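   Example:
   A minimal sketch of an in-place LU factorization followed by a solve, assuming `A` is a square, assembled matrix whose type supports `MatLUFactor()` and `b`, `x` are conforming vectors
.vb
   IS            rowperm, colperm;
   MatFactorInfo info;
   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm));
   PetscCall(MatLUFactor(A, rowperm, colperm, &info)); /* A now holds its own LU factors */
   PetscCall(MatSolve(A, b, x));
   PetscCall(ISDestroy(&rowperm));
   PetscCall(ISDestroy(&colperm));
.ve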
3132: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3133: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3134: @*/
3135: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3136: {
3137: MatFactorInfo tinfo;
3139: PetscFunctionBegin;
3143: if (info) PetscAssertPointer(info, 4);
3145: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3146: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3147: MatCheckPreallocated(mat, 1);
3148: if (!info) {
3149: PetscCall(MatFactorInfoInitialize(&tinfo));
3150: info = &tinfo;
3151: }
3153: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3154: PetscUseTypeMethod(mat, lufactor, row, col, info);
3155: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3156: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3157: PetscFunctionReturn(PETSC_SUCCESS);
3158: }
3160: /*@
3161: MatILUFactor - Performs in-place ILU factorization of a matrix.
3163: Collective
3165: Input Parameters:
3166: + mat - the matrix
3167: . row - row permutation
3168: . col - column permutation
3169: - info - structure containing
3170: .vb
3171: levels - number of levels of fill.
3172: fill - expected fill as ratio of original fill.
3173: diagonal fill - 1 or 0, indicating whether to force fill on the diagonal (improves robustness for matrices
3174: missing diagonal entries)
3175: .ve
3177: Level: developer
3179: Notes:
3180: Most users should employ the `KSP` interface for linear solvers
3181: instead of working directly with matrix algebra routines such as this.
3182: See, e.g., `KSPCreate()`.
3184: This is likely truly in-place only when the level of fill is zero; otherwise new space is allocated
3185: to store the factored matrix and the previous memory is released. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3186: when not using `KSP`.
3188: Fortran Note:
3189: A valid (non-null) `info` argument must be provided
3191: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3192: @*/
3193: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3194: {
3195: PetscFunctionBegin;
3199: PetscAssertPointer(info, 4);
3201: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3202: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3203: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3204: MatCheckPreallocated(mat, 1);
3206: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3207: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3208: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3209: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3210: PetscFunctionReturn(PETSC_SUCCESS);
3211: }
3213: /*@
3214: MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
3215: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3217: Collective
3219: Input Parameters:
3220: + fact - the factor matrix obtained with `MatGetFactor()`
3221: . mat - the matrix
3222: . row - the row permutation
3223: . col - the column permutation
3224: - info - options for factorization, includes
3225: .vb
3226: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3227: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3228: .ve
3230: Level: developer
3232: Notes:
3233: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3235: Most users should employ the simplified `KSP` interface for linear solvers
3236: instead of working directly with matrix algebra routines such as this.
3237: See, e.g., `KSPCreate()`.
3239: Fortran Note:
3240: A valid (non-null) `info` argument must be provided
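   Example:
   A minimal sketch of the complete out-of-place sequence, assuming `A` is a square, assembled `MATAIJ` matrix and `b`, `x` are conforming vectors
.vb
   Mat           F;
   IS            rowperm, colperm;
   MatFactorInfo info;
   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
   PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm));
   PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
   PetscCall(MatLUFactorNumeric(F, A, &info));
   PetscCall(MatSolve(F, b, x));
   PetscCall(ISDestroy(&rowperm));
   PetscCall(ISDestroy(&colperm));
   PetscCall(MatDestroy(&F));
.ve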
3242: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3243: @*/
3244: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3245: {
3246: MatFactorInfo tinfo;
3248: PetscFunctionBegin;
3253: if (info) PetscAssertPointer(info, 5);
3256: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3257: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3258: MatCheckPreallocated(mat, 2);
3259: if (!info) {
3260: PetscCall(MatFactorInfoInitialize(&tinfo));
3261: info = &tinfo;
3262: }
3264: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3265: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3266: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3267: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3268: PetscFunctionReturn(PETSC_SUCCESS);
3269: }
3271: /*@
3272: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3273: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3275: Collective
3277: Input Parameters:
3278: + fact - the factor matrix obtained with `MatGetFactor()`
3279: . mat - the matrix
3280: - info - options for factorization
3282: Level: developer
3284: Notes:
3285: See `MatLUFactor()` for in-place factorization. See
3286: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3288: Most users should employ the `KSP` interface for linear solvers
3289: instead of working directly with matrix algebra routines such as this.
3290: See, e.g., `KSPCreate()`.
3292: Fortran Note:
3293: A valid (non-null) `info` argument must be provided
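   Example:
   A minimal sketch of refactoring after the numerical values of `A` change while its nonzero pattern stays the same, assuming the symbolic factorization already lives in `F` (see `MatLUFactorSymbolic()`)
.vb
   /* ... modify the entries of A without changing its nonzero pattern ... */
   PetscCall(MatLUFactorNumeric(F, A, &info)); /* reuse the existing symbolic factorization */
   PetscCall(MatSolve(F, b, x));
.ve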
3295: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3296: @*/
3297: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3298: {
3299: MatFactorInfo tinfo;
3301: PetscFunctionBegin;
3306: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3307: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3308: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3310: MatCheckPreallocated(mat, 2);
3311: if (!info) {
3312: PetscCall(MatFactorInfoInitialize(&tinfo));
3313: info = &tinfo;
3314: }
3316: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3317: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3318: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3319: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3320: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3321: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3322: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3323: PetscFunctionReturn(PETSC_SUCCESS);
3324: }
3326: /*@
3327: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3328: symmetric matrix.
3330: Collective
3332: Input Parameters:
3333: + mat - the matrix
3334: . perm - row and column permutations
3335: - info - expected fill as ratio of original fill
3337: Level: developer
3339: Notes:
3340: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3341: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3343: Most users should employ the `KSP` interface for linear solvers
3344: instead of working directly with matrix algebra routines such as this.
3345: See, e.g., `KSPCreate()`.
3347: Fortran Note:
3348: A valid (non-null) `info` argument must be provided
3350: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`,
3351: `MatGetOrdering()`
3352: @*/
3353: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3354: {
3355: MatFactorInfo tinfo;
3357: PetscFunctionBegin;
3360: if (info) PetscAssertPointer(info, 3);
3362: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3363: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3364: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3365: MatCheckPreallocated(mat, 1);
3366: if (!info) {
3367: PetscCall(MatFactorInfoInitialize(&tinfo));
3368: info = &tinfo;
3369: }
3371: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3372: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3373: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3374: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3375: PetscFunctionReturn(PETSC_SUCCESS);
3376: }
3378: /*@
3379: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3380: of a symmetric matrix.
3382: Collective
3384: Input Parameters:
3385: + fact - the factor matrix obtained with `MatGetFactor()`
3386: . mat - the matrix
3387: . perm - row and column permutations
3388: - info - options for factorization, includes
3389: .vb
3390: fill - expected fill as ratio of original fill.
3391: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3392: Run with the option -info to determine an optimal value to use
3393: .ve
3395: Level: developer
3397: Notes:
3398: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3399: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3401: Most users should employ the `KSP` interface for linear solvers
3402: instead of working directly with matrix algebra routines such as this.
3403: See, e.g., `KSPCreate()`.
3405: Fortran Note:
3406: A valid (non-null) `info` argument must be provided
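   Example:
   A minimal sketch of the Cholesky sequence, assuming `A` is a symmetric positive definite, assembled matrix and `b`, `x` are conforming vectors
.vb
   Mat           F;
   IS            rowperm, colperm;
   MatFactorInfo info;
   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F));
   PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm));
   PetscCall(MatCholeskyFactorSymbolic(F, A, rowperm, &info));
   PetscCall(MatCholeskyFactorNumeric(F, A, &info));
   PetscCall(MatSolve(F, b, x));
   PetscCall(ISDestroy(&rowperm));
   PetscCall(ISDestroy(&colperm));
   PetscCall(MatDestroy(&F));
.ve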
3408: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`,
3409: `MatGetOrdering()`
3410: @*/
3411: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3412: {
3413: MatFactorInfo tinfo;
3415: PetscFunctionBegin;
3419: if (info) PetscAssertPointer(info, 4);
3422: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3423: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3424: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3425: MatCheckPreallocated(mat, 2);
3426: if (!info) {
3427: PetscCall(MatFactorInfoInitialize(&tinfo));
3428: info = &tinfo;
3429: }
3431: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3432: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3433: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3434: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3435: PetscFunctionReturn(PETSC_SUCCESS);
3436: }
3438: /*@
3439: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3440: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3441: `MatCholeskyFactorSymbolic()`.
3443: Collective
3445: Input Parameters:
3446: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3447: . mat - the initial matrix that is to be factored
3448: - info - options for factorization
3450: Level: developer
3452: Note:
3453: Most users should employ the `KSP` interface for linear solvers
3454: instead of working directly with matrix algebra routines such as this.
3455: See, e.g., `KSPCreate()`.
3457: Fortran Note:
3458: A valid (non-null) `info` argument must be provided
3460: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3461: @*/
3462: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3463: {
3464: MatFactorInfo tinfo;
3466: PetscFunctionBegin;
3471: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3472: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3473: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3474: MatCheckPreallocated(mat, 2);
3475: if (!info) {
3476: PetscCall(MatFactorInfoInitialize(&tinfo));
3477: info = &tinfo;
3478: }
3480: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3481: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3482: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3483: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3484: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3485: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3486: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3487: PetscFunctionReturn(PETSC_SUCCESS);
3488: }
3490: /*@
3491: MatQRFactor - Performs in-place QR factorization of a matrix.
3493: Collective
3495: Input Parameters:
3496: + mat - the matrix
3497: . col - column permutation
3498: - info - options for factorization, includes
3499: .vb
3500: fill - expected fill as ratio of original fill.
3501: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3502: Run with the option -info to determine an optimal value to use
3503: .ve
3505: Level: developer
3507: Notes:
3508: Most users should employ the `KSP` interface for linear solvers
3509: instead of working directly with matrix algebra routines such as this.
3510: See, e.g., `KSPCreate()`.
3512: This changes the state of the matrix to a factored matrix; it cannot be used
3513: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3515: Fortran Note:
3516: A valid (non-null) `info` argument must be provided
3518: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3519: `MatSetUnfactored()`
3520: @*/
3521: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3522: {
3523: PetscFunctionBegin;
3526: if (info) PetscAssertPointer(info, 3);
3528: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3529: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3530: MatCheckPreallocated(mat, 1);
3531: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3532: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3533: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3534: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3535: PetscFunctionReturn(PETSC_SUCCESS);
3536: }
3538: /*@
3539: MatQRFactorSymbolic - Performs symbolic QR factorization of a matrix.
3540: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3542: Collective
3544: Input Parameters:
3545: + fact - the factor matrix obtained with `MatGetFactor()`
3546: . mat - the matrix
3547: . col - column permutation
3548: - info - options for factorization, includes
3549: .vb
3550: fill - expected fill as ratio of original fill.
3551: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3552: Run with the option -info to determine an optimal value to use
3553: .ve
3555: Level: developer
3557: Note:
3558: Most users should employ the `KSP` interface for linear solvers
3559: instead of working directly with matrix algebra routines such as this.
3560: See, e.g., `KSPCreate()`.
3562: Fortran Note:
3563: A valid (non-null) `info` argument must be provided
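   Example:
   A minimal sketch of the QR sequence, assuming `A` is a square, assembled `MATSEQDENSE` matrix (a type whose PETSc factor package supports QR) and `b`, `x` are conforming vectors
.vb
   Mat           F;
   IS            rowperm, colperm;
   MatFactorInfo info;
   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_QR, &F));
   PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm));
   PetscCall(MatQRFactorSymbolic(F, A, colperm, &info));
   PetscCall(MatQRFactorNumeric(F, A, &info));
   PetscCall(MatSolve(F, b, x));
   PetscCall(ISDestroy(&rowperm));
   PetscCall(ISDestroy(&colperm));
   PetscCall(MatDestroy(&F));
.ve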
3565: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3566: @*/
3567: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3568: {
3569: MatFactorInfo tinfo;
3571: PetscFunctionBegin;
3575: if (info) PetscAssertPointer(info, 4);
3578: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3579: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3580: MatCheckPreallocated(mat, 2);
3581: if (!info) {
3582: PetscCall(MatFactorInfoInitialize(&tinfo));
3583: info = &tinfo;
3584: }
3586: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3587: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3588: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3589: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3590: PetscFunctionReturn(PETSC_SUCCESS);
3591: }
3593: /*@
3594: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3595: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3597: Collective
3599: Input Parameters:
3600: + fact - the factor matrix obtained with `MatGetFactor()`
3601: . mat - the matrix
3602: - info - options for factorization
3604: Level: developer
3606: Notes:
3607: See `MatQRFactor()` for in-place factorization.
3609: Most users should employ the `KSP` interface for linear solvers
3610: instead of working directly with matrix algebra routines such as this.
3611: See, e.g., `KSPCreate()`.
3613: Fortran Note:
3614: A valid (non-null) `info` argument must be provided
3616: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3617: @*/
3618: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3619: {
3620: MatFactorInfo tinfo;
3622: PetscFunctionBegin;
3627: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3628: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3629: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3631: MatCheckPreallocated(mat, 2);
3632: if (!info) {
3633: PetscCall(MatFactorInfoInitialize(&tinfo));
3634: info = &tinfo;
3635: }
3637: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3638: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3639: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3640: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3641: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3642: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3643: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3644: PetscFunctionReturn(PETSC_SUCCESS);
3645: }
3647: /*@
3648: MatSolve - Solves $A x = b$, given a factored matrix.
3650: Neighbor-wise Collective
3652: Input Parameters:
3653: + mat - the factored matrix
3654: - b - the right-hand-side vector
3656: Output Parameter:
3657: . x - the result vector
3659: Level: developer
3661: Notes:
3662: The vectors `b` and `x` cannot be the same. I.e., one cannot
3663: call `MatSolve`(A,x,x).
3665: Most users should employ the `KSP` interface for linear solvers
3666: instead of working directly with matrix algebra routines such as this.
3667: See, e.g., `KSPCreate()`.
3669: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3670: @*/
3671: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3672: {
3673: PetscFunctionBegin;
3678: PetscCheckSameComm(mat, 1, b, 2);
3679: PetscCheckSameComm(mat, 1, x, 3);
3680: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3681: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3682: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3683: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3684: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3685: MatCheckPreallocated(mat, 1);
3687: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3688: PetscCall(VecFlag(x, mat->factorerrortype));
3689: if (mat->factorerrortype) PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3690: else PetscUseTypeMethod(mat, solve, b, x);
3691: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3692: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3693: PetscFunctionReturn(PETSC_SUCCESS);
3694: }
3696: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3697: {
3698: Vec b, x;
3699: PetscInt N, i;
3700: PetscErrorCode (*f)(Mat, Vec, Vec);
3701: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3703: PetscFunctionBegin;
3704: if (A->factorerrortype) {
3705: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3706: PetscCall(MatSetInf(X));
3707: PetscFunctionReturn(PETSC_SUCCESS);
3708: }
3709: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3710: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3711: PetscCall(MatBoundToCPU(A, &Abound));
3712: if (!Abound) {
3713: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3714: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3715: }
3716: #if PetscDefined(HAVE_CUDA)
3717: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3718: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3719: #elif PetscDefined(HAVE_HIP)
3720: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3721: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3722: #endif
3723: PetscCall(MatGetSize(B, NULL, &N));
3724: for (i = 0; i < N; i++) {
3725: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3726: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3727: PetscCall((*f)(A, b, x));
3728: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3729: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3730: }
3731: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3732: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3733: PetscFunctionReturn(PETSC_SUCCESS);
3734: }
3736: /*@
3737: MatMatSolve - Solves $A X = B$, given a factored matrix.
3739: Neighbor-wise Collective
3741: Input Parameters:
3742: + A - the factored matrix
3743: - B - the right-hand-side matrix, of type `MATDENSE` (or sparse `MATAIJ` when using MUMPS)
3745: Output Parameter:
3746: . X - the result matrix (dense matrix)
3748: Level: developer
3750: Note:
3751: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3752: otherwise, `B` and `X` cannot be the same.
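   Example:
   A minimal sketch, assuming `F` is a square factored matrix (see `MatLUFactorNumeric()`) and `B` is a conforming `MATDENSE` right-hand-side matrix
.vb
   Mat X;
   PetscCall(MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X));
   PetscCall(MatMatSolve(F, B, X)); /* each column of X solves A x_i = b_i using the factors in F */
   PetscCall(MatDestroy(&X));
.ve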
3754: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3755: @*/
3756: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3757: {
3758: PetscFunctionBegin;
3763: PetscCheckSameComm(A, 1, B, 2);
3764: PetscCheckSameComm(A, 1, X, 3);
3765: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3766: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3767: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3768: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3769: MatCheckPreallocated(A, 1);
3771: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3772: if (!A->ops->matsolve) {
3773: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3774: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3775: } else PetscUseTypeMethod(A, matsolve, B, X);
3776: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3777: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3778: PetscFunctionReturn(PETSC_SUCCESS);
3779: }
3781: /*@
3782: MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.
3784: Neighbor-wise Collective
3786: Input Parameters:
3787: + A - the factored matrix
3788: - B - the right-hand-side matrix (`MATDENSE` matrix)
3790: Output Parameter:
3791: . X - the result matrix (dense matrix)
3793: Level: developer
3795: Note:
3796: The matrices `B` and `X` cannot be the same. I.e., one cannot
3797: call `MatMatSolveTranspose`(A,X,X).
3799: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3800: @*/
3801: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3802: {
3803: PetscFunctionBegin;
3808: PetscCheckSameComm(A, 1, B, 2);
3809: PetscCheckSameComm(A, 1, X, 3);
3810: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3811: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3812: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3813: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3814: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix");
3815: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3816: MatCheckPreallocated(A, 1);
3818: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3819: if (!A->ops->matsolvetranspose) {
3820: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3821: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3822: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3823: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3824: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3825: PetscFunctionReturn(PETSC_SUCCESS);
3826: }
3828: /*@
3829: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3831: Neighbor-wise Collective
3833: Input Parameters:
3834: + A - the factored matrix
3835: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3837: Output Parameter:
3838: . X - the result matrix (dense matrix)
3840: Level: developer
3842: Note:
3843: MUMPS supports only a centralized sparse compressed-column format on the host process for the right-hand-side matrix. The user must therefore create `Bt` in sparse compressed-row
3844: format on the host process and call `MatMatTransposeSolve()` to obtain the effect of MUMPS' `MatMatSolve()`.
3846: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3847: @*/
3848: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3849: {
3850: PetscFunctionBegin;
3855: PetscCheckSameComm(A, 1, Bt, 2);
3856: PetscCheckSameComm(A, 1, X, 3);
3858: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3859: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3860: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3861: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix has rows");
3862: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3863: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3864: MatCheckPreallocated(A, 1);
3866: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3867: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3868: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3869: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3870: PetscFunctionReturn(PETSC_SUCCESS);
3871: }
3873: /*@
3874: MatForwardSolve - Solves $L x = b$, given a factored matrix $A = LU$, or
3875: $U^T D^{1/2} x = b$, given a factored symmetric matrix $A = U^T D U$.
3877: Neighbor-wise Collective
3879: Input Parameters:
3880: + mat - the factored matrix
3881: - b - the right-hand-side vector
3883: Output Parameter:
3884: . x - the result vector
3886: Level: developer
3888: Notes:
3889: `MatSolve()` should be used for most applications, as it performs
3890: a forward solve followed by a backward solve.
3892: The vectors `b` and `x` cannot be the same, i.e., one cannot
3893: call `MatForwardSolve`(A,x,x).
3895: For matrices in `MATSEQBAIJ` format with block size larger than 1,
3896: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3897: Instead, `MatForwardSolve()` solves $U^T D y = b$, and
3898: `MatBackwardSolve()` solves $U x = y$.
3899: Thus they do not provide a symmetric preconditioner.
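   Example:
   A minimal sketch of a split triangular solve, assuming `F` holds an LU factorization (see `MatLUFactorNumeric()`) and `b`, `y`, `x` are conforming vectors
.vb
   PetscCall(MatForwardSolve(F, b, y));  /* y = L^{-1} b */
   PetscCall(MatBackwardSolve(F, y, x)); /* x = U^{-1} y, so overall x = A^{-1} b */
.ve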
3901: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3902: @*/
3903: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3904: {
3905: PetscFunctionBegin;
3910: PetscCheckSameComm(mat, 1, b, 2);
3911: PetscCheckSameComm(mat, 1, x, 3);
3912: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3913: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3914: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3915: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3916: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3917: MatCheckPreallocated(mat, 1);
3919: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3920: PetscUseTypeMethod(mat, forwardsolve, b, x);
3921: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3922: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3923: PetscFunctionReturn(PETSC_SUCCESS);
3924: }
3926: /*@
3927: MatBackwardSolve - Solves $U x = b$, given a factored matrix $A = LU$, or
3928: $D^{1/2} U x = b$, given a factored symmetric matrix $A = U^T D U$.
3930: Neighbor-wise Collective
3932: Input Parameters:
3933: + mat - the factored matrix
3934: - b - the right-hand-side vector
3936: Output Parameter:
3937: . x - the result vector
3939: Level: developer
3941: Notes:
3942: `MatSolve()` should be used for most applications, as it performs
3943: a forward solve followed by a backward solve.
3945: The vectors `b` and `x` cannot be the same. I.e., one cannot
3946: call `MatBackwardSolve`(A,x,x).
3948: For matrices in `MATSEQBAIJ` format with block size larger than 1,
3949: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3950: Instead, `MatForwardSolve()` solves $U^T D y = b$, and
3951: `MatBackwardSolve()` solves $U x = y$.
3952: Thus they do not provide a symmetric preconditioner.
3954: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3955: @*/
3956: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3957: {
3958: PetscFunctionBegin;
3963: PetscCheckSameComm(mat, 1, b, 2);
3964: PetscCheckSameComm(mat, 1, x, 3);
3965: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3966: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3967: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3968: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3969: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3970: MatCheckPreallocated(mat, 1);
3972: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3973: PetscUseTypeMethod(mat, backwardsolve, b, x);
3974: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3975: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3976: PetscFunctionReturn(PETSC_SUCCESS);
3977: }
3979: /*@
3980: MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.
3982: Neighbor-wise Collective
3984: Input Parameters:
3985: + mat - the factored matrix
3986: . b - the right-hand-side vector
3987: - y - the vector to be added to
3989: Output Parameter:
3990: . x - the result vector
3992: Level: developer
3994: Note:
3995: The vectors `b` and `x` cannot be the same. I.e., one cannot
3996: call `MatSolveAdd`(A,x,y,x).
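   Example:
   A minimal sketch, assuming `F` is a factored matrix and `b`, `y`, `x` are conforming vectors
.vb
   PetscCall(MatSolveAdd(F, b, y, x)); /* x = y + A^{-1} b using the factors stored in F; y and x may be the same vector */
.ve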
3998: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3999: @*/
4000: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
4001: {
4002: PetscScalar one = 1.0;
4003: Vec tmp;
4005: PetscFunctionBegin;
4011: PetscCheckSameComm(mat, 1, b, 2);
4012: PetscCheckSameComm(mat, 1, y, 3);
4013: PetscCheckSameComm(mat, 1, x, 4);
4014: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4015: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4016: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4017: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4018: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4019: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4020: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4021: MatCheckPreallocated(mat, 1);
4023: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
4024: PetscCall(VecFlag(x, mat->factorerrortype));
4025: if (mat->factorerrortype) {
4026: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4027: } else if (mat->ops->solveadd) {
4028: PetscUseTypeMethod(mat, solveadd, b, y, x);
4029: } else {
4030: /* do the solve then the add manually */
4031: if (x != y) {
4032: PetscCall(MatSolve(mat, b, x));
4033: PetscCall(VecAXPY(x, one, y));
4034: } else {
4035: PetscCall(VecDuplicate(x, &tmp));
4036: PetscCall(VecCopy(x, tmp));
4037: PetscCall(MatSolve(mat, b, x));
4038: PetscCall(VecAXPY(x, one, tmp));
4039: PetscCall(VecDestroy(&tmp));
4040: }
4041: }
4042: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4043: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4044: PetscFunctionReturn(PETSC_SUCCESS);
4045: }
4047: /*@
4048: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4050: Neighbor-wise Collective
4052: Input Parameters:
4053: + mat - the factored matrix
4054: - b - the right-hand-side vector
4056: Output Parameter:
4057: . x - the result vector
4059: Level: developer
4061: Notes:
4062: The vectors `b` and `x` cannot be the same. I.e., one cannot
4063: call `MatSolveTranspose`(A,x,x).
4065: Most users should employ the `KSP` interface for linear solvers
4066: instead of working directly with matrix algebra routines such as this.
4067: See, e.g., `KSPCreate()`.
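   Example:
   A minimal sketch, assuming `F` is a factored matrix and `b`, `x` are conforming vectors
.vb
   PetscCall(MatSolveTranspose(F, b, x)); /* solves A^T x = b using the factors stored in F */
.ve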
4069: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4070: @*/
4071: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4072: {
4073: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4075: PetscFunctionBegin;
4080: PetscCheckSameComm(mat, 1, b, 2);
4081: PetscCheckSameComm(mat, 1, x, 3);
4082: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4083: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4084: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4085: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4086: MatCheckPreallocated(mat, 1);
4087: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4088: PetscCall(VecFlag(x, mat->factorerrortype));
4089: if (mat->factorerrortype) {
4090: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4091: } else {
4092: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4093: PetscCall((*f)(mat, b, x));
4094: }
4095: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4096: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4097: PetscFunctionReturn(PETSC_SUCCESS);
4098: }
4100: /*@
4101: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4102: factored matrix.
4104: Neighbor-wise Collective
4106: Input Parameters:
4107: + mat - the factored matrix
4108: . b - the right-hand-side vector
4109: - y - the vector to be added to
4111: Output Parameter:
4112: . x - the result vector
4114: Level: developer
4116: Note:
4117: The vectors `b` and `x` cannot be the same. I.e., one cannot
4118: call `MatSolveTransposeAdd`(A,x,y,x).
4120: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4121: @*/
4122: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4123: {
4124: PetscScalar one = 1.0;
4125: Vec tmp;
4126: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4128: PetscFunctionBegin;
4134: PetscCheckSameComm(mat, 1, b, 2);
4135: PetscCheckSameComm(mat, 1, y, 3);
4136: PetscCheckSameComm(mat, 1, x, 4);
4137: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4138: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4139: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4140: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4141: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4142: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4143: MatCheckPreallocated(mat, 1);
4145: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4146: PetscCall(VecFlag(x, mat->factorerrortype));
4147: if (mat->factorerrortype) {
4148: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4149: } else if (f) {
4150: PetscCall((*f)(mat, b, y, x));
4151: } else {
4152: /* do the solve then the add manually */
4153: if (x != y) {
4154: PetscCall(MatSolveTranspose(mat, b, x));
4155: PetscCall(VecAXPY(x, one, y));
4156: } else {
4157: PetscCall(VecDuplicate(x, &tmp));
4158: PetscCall(VecCopy(x, tmp));
4159: PetscCall(MatSolveTranspose(mat, b, x));
4160: PetscCall(VecAXPY(x, one, tmp));
4161: PetscCall(VecDestroy(&tmp));
4162: }
4163: }
4164: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4165: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4166: PetscFunctionReturn(PETSC_SUCCESS);
4167: }
4169: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4170: /*@
4171: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4173: Neighbor-wise Collective
4175: Input Parameters:
4176: + mat - the matrix
4177: . b - the right-hand side
4178: . omega - the relaxation factor
4179: . flag - flag indicating the type of SOR (see below)
4180: . shift - diagonal shift
4181: . its - the number of iterations
4182: - lits - the number of local iterations
4184: Output Parameter:
4185: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4187: SOR Flags:
4188: + `SOR_FORWARD_SWEEP` - forward SOR
4189: . `SOR_BACKWARD_SWEEP` - backward SOR
4190: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4191: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4192: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4193: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4194: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4195: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4196: upper/lower triangular part of matrix to
4197: vector (with omega)
4198: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4200: Level: developer
4202: Notes:
4203: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4204: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4205: on each processor.
4207: Application programmers will not generally use `MatSOR()` directly,
4208: but instead will employ the `KSP`/`PC` interface.
4210:   For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing; otherwise it does a pointwise smoothing.
4212: Most users should employ the `KSP` interface for linear solvers
4213: instead of working directly with matrix algebra routines such as this.
4214: See, e.g., `KSPCreate()`.
4216: Vectors `x` and `b` CANNOT be the same
4218: The flags are implemented as bitwise inclusive or operations.
4219: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4220: to specify a zero initial guess for SSOR.
4222: Developer Note:
4223:   We should add block SOR support for `MATAIJ` matrices with block size set greater than one and no inodes
4225: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4226: @*/
4227: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4228: {
4229: PetscFunctionBegin;
4234: PetscCheckSameComm(mat, 1, b, 2);
4235: PetscCheckSameComm(mat, 1, x, 8);
4236: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4237: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4238: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4239: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4240: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4241: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4242: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4243: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4245: MatCheckPreallocated(mat, 1);
4246: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4247: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4248: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4249: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4250: PetscFunctionReturn(PETSC_SUCCESS);
4251: }
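/*
   Example usage (a sketch; A, b, x are assumed to be an assembled AIJ matrix and
   conforming vectors): one SSOR sweep with a zero initial guess, combining the
   flags with a bitwise OR as described in the notes above.

     PetscCall(MatSOR(A, b, 1.0, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), 0.0, 1, 1, x));
*/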
4253: /*
4254: Default matrix copy routine.
4255: */
4256: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4257: {
4258: PetscInt i, rstart = 0, rend = 0, nz;
4259: const PetscInt *cwork;
4260: const PetscScalar *vwork;
4262: PetscFunctionBegin;
4263: if (B->assembled) PetscCall(MatZeroEntries(B));
4264: if (str == SAME_NONZERO_PATTERN) {
4265: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4266: for (i = rstart; i < rend; i++) {
4267: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4268: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4269: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4270: }
4271: } else {
4272: PetscCall(MatAYPX(B, 0.0, A, str));
4273: }
4274: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4275: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4276: PetscFunctionReturn(PETSC_SUCCESS);
4277: }
4279: /*@
4280: MatCopy - Copies a matrix to another matrix.
4282: Collective
4284: Input Parameters:
4285: + A - the matrix
4286: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4288: Output Parameter:
4289: . B - where the copy is put
4291: Level: intermediate
4293: Notes:
4294: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4296: `MatCopy()` copies the matrix entries of a matrix to another existing
4297: matrix (after first zeroing the second matrix). A related routine is
4298: `MatConvert()`, which first creates a new matrix and then copies the data.
4300: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4301: @*/
4302: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4303: {
4304: PetscInt i;
4306: PetscFunctionBegin;
4311: PetscCheckSameComm(A, 1, B, 2);
4312: MatCheckPreallocated(B, 2);
4313: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4314: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4315: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4316: A->cmap->N, B->cmap->N);
4317: MatCheckPreallocated(A, 1);
4318: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4320: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4321: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4322: else PetscCall(MatCopy_Basic(A, B, str));
4324: B->stencil.dim = A->stencil.dim;
4325: B->stencil.noc = A->stencil.noc;
4326: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4327: B->stencil.dims[i] = A->stencil.dims[i];
4328: B->stencil.starts[i] = A->stencil.starts[i];
4329: }
4331: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4332: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4333: PetscFunctionReturn(PETSC_SUCCESS);
4334: }
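/*
   Example usage (a sketch; A is assumed assembled): duplicate the nonzero structure
   once, then copy values with SAME_NONZERO_PATTERN, which is cheaper than duplicating
   the whole matrix every time the values of A change.

     Mat B;
     PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
     PetscCall(MatCopy(A, B, SAME_NONZERO_PATTERN));   valid because B shares A's nonzero pattern
     PetscCall(MatDestroy(&B));
*/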
4336: /*@
4337: MatConvert - Converts a matrix to another matrix, either of the same
4338: or different type.
4340: Collective
4342: Input Parameters:
4343: + mat - the matrix
4344: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4345: same type as the original matrix.
4346: - reuse - denotes if the destination matrix is to be created or reused.
4347: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input `Mat` to be changed to contain the matrix in the new format), otherwise use
4348: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4350: Output Parameter:
4351: . M - pointer to place new matrix
4353: Level: intermediate
4355: Notes:
4356: `MatConvert()` first creates a new matrix and then copies the data from
4357: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4358: entries of one matrix to another already existing matrix context.
4360:   Cannot be used to convert a sequential matrix to parallel or parallel to sequential,
4361:   since the MPI communicator of the generated matrix is always the same as the communicator
4362:   of the input matrix.
4364: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4365: @*/
4366: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4367: {
4368: PetscBool sametype, issame, flg;
4369: PetscBool3 issymmetric, ishermitian;
4370: char convname[256], mtype[256];
4371: Mat B;
4373: PetscFunctionBegin;
4376: PetscAssertPointer(M, 4);
4377: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4378: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4379: MatCheckPreallocated(mat, 1);
4381: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4382: if (flg) newtype = mtype;
4384: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4385: PetscCall(PetscStrcmp(newtype, "same", &issame));
4386: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4387: if (reuse == MAT_REUSE_MATRIX) {
4389: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4390: }
4392: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4393: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4394: PetscFunctionReturn(PETSC_SUCCESS);
4395: }
4397: /* Cache Mat options because some converters use MatHeaderReplace */
4398: issymmetric = mat->symmetric;
4399: ishermitian = mat->hermitian;
4401: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4402: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4403: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4404: } else {
4405: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4406: const char *prefix[3] = {"seq", "mpi", ""};
4407: PetscInt i;
4408: /*
4409: Order of precedence:
4410: 0) See if newtype is a superclass of the current matrix.
4411: 1) See if a specialized converter is known to the current matrix.
4412: 2) See if a specialized converter is known to the desired matrix class.
4413: 3) See if a good general converter is registered for the desired class
4414: (as of 6/27/03 only MATMPIADJ falls into this category).
4415: 4) See if a good general converter is known for the current matrix.
4416: 5) Use a really basic converter.
4417: */
4419: /* 0) See if newtype is a superclass of the current matrix.
4420:        i.e. mat is mpiaij and newtype is aij */
4421: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4422: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4423: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4424: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4425: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4426: if (flg) {
4427: if (reuse == MAT_INPLACE_MATRIX) {
4428: PetscCall(PetscInfo(mat, "Early return\n"));
4429: PetscFunctionReturn(PETSC_SUCCESS);
4430: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4431: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4432: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4433: PetscFunctionReturn(PETSC_SUCCESS);
4434: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4435: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4436: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4437: PetscFunctionReturn(PETSC_SUCCESS);
4438: }
4439: }
4440: }
4441: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4442: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4443: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4444: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4445: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4446: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4447: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4448: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4449: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4450: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4451: if (conv) goto foundconv;
4452: }
4454: /* 2) See if a specialized converter is known to the desired matrix class. */
4455: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4456: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4457: PetscCall(MatSetType(B, newtype));
4458: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4459: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4460: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4461: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4462: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4463: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4464: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4465: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4466: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4467: if (conv) {
4468: PetscCall(MatDestroy(&B));
4469: goto foundconv;
4470: }
4471: }
4473: /* 3) See if a good general converter is registered for the desired class */
4474: conv = B->ops->convertfrom;
4475: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4476: PetscCall(MatDestroy(&B));
4477: if (conv) goto foundconv;
4479: /* 4) See if a good general converter is known for the current matrix */
4480: if (mat->ops->convert) conv = mat->ops->convert;
4481: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4482: if (conv) goto foundconv;
4484: /* 5) Use a really basic converter. */
4485: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4486: conv = MatConvert_Basic;
4488: foundconv:
4489: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4490: PetscCall((*conv)(mat, newtype, reuse, M));
4491: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4492: /* the block sizes must be same if the mappings are copied over */
4493: (*M)->rmap->bs = mat->rmap->bs;
4494: (*M)->cmap->bs = mat->cmap->bs;
4495: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4496: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4497: (*M)->rmap->mapping = mat->rmap->mapping;
4498: (*M)->cmap->mapping = mat->cmap->mapping;
4499: }
4500: (*M)->stencil.dim = mat->stencil.dim;
4501: (*M)->stencil.noc = mat->stencil.noc;
4502: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4503: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4504: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4505: }
4506: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4507: }
4508: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4510: /* Copy Mat options */
4511: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4512: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4513: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4514: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4515: PetscFunctionReturn(PETSC_SUCCESS);
4516: }
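/*
   Example usage (a sketch; A is assumed to be an assembled sparse matrix): convert
   to a dense matrix once, then refresh that same dense matrix after the values of A
   have changed by passing MAT_REUSE_MATRIX.

     Mat Adense;
     PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense));
     PetscCall(MatConvert(A, MATDENSE, MAT_REUSE_MATRIX, &Adense));
     PetscCall(MatDestroy(&Adense));
*/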
4518: /*@
4519:   MatFactorGetSolverType - Returns the name of the package providing the factorization routines
4521: Not Collective
4523: Input Parameter:
4524: . mat - the matrix, must be a factored matrix
4526: Output Parameter:
4527: . type - the string name of the package (do not free this string)
4529: Level: intermediate
4531: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4532: @*/
4533: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4534: {
4535: PetscErrorCode (*conv)(Mat, MatSolverType *);
4537: PetscFunctionBegin;
4540: PetscAssertPointer(type, 2);
4541: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4542: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4543: if (conv) PetscCall((*conv)(mat, type));
4544: else *type = MATSOLVERPETSC;
4545: PetscFunctionReturn(PETSC_SUCCESS);
4546: }
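/*
   Example usage (a sketch; F is assumed to be a factored matrix obtained from
   MatGetFactor()): report which solver package actually provided the factorization.

     MatSolverType stype;
     PetscCall(MatFactorGetSolverType(F, &stype));
     PetscCall(PetscPrintf(PetscObjectComm((PetscObject)F), "Factorization provided by %s\n", stype));
*/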
4548: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4549: struct _MatSolverTypeForSpecifcType {
4550: MatType mtype;
4551: /* no entry for MAT_FACTOR_NONE */
4552: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4553: MatSolverTypeForSpecifcType next;
4554: };
4556: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4557: struct _MatSolverTypeHolder {
4558: char *name;
4559: MatSolverTypeForSpecifcType handlers;
4560: MatSolverTypeHolder next;
4561: };
4563: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4565: /*@C
4566: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4568: Logically Collective, No Fortran Support
4570: Input Parameters:
4571: + package - name of the package, for example `petsc` or `superlu`
4572: . mtype - the matrix type that works with this package
4573: . ftype - the type of factorization supported by the package
4574: - createfactor - routine that will create the factored matrix ready to be used
4576: Level: developer
4578: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4579: `MatGetFactor()`
4580: @*/
4581: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4582: {
4583: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4584: PetscBool flg;
4585: MatSolverTypeForSpecifcType inext, iprev = NULL;
4587: PetscFunctionBegin;
4588: PetscCall(MatInitializePackage());
4589: if (!next) {
4590: PetscCall(PetscNew(&MatSolverTypeHolders));
4591: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4592: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4593: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4594: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4595: PetscFunctionReturn(PETSC_SUCCESS);
4596: }
4597: while (next) {
4598: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4599: if (flg) {
4600: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4601: inext = next->handlers;
4602: while (inext) {
4603: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4604: if (flg) {
4605: inext->createfactor[(int)ftype - 1] = createfactor;
4606: PetscFunctionReturn(PETSC_SUCCESS);
4607: }
4608: iprev = inext;
4609: inext = inext->next;
4610: }
4611: PetscCall(PetscNew(&iprev->next));
4612: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4613: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4614: PetscFunctionReturn(PETSC_SUCCESS);
4615: }
4616: prev = next;
4617: next = next->next;
4618: }
4619: PetscCall(PetscNew(&prev->next));
4620: PetscCall(PetscStrallocpy(package, &prev->next->name));
4621: PetscCall(PetscNew(&prev->next->handlers));
4622: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4623: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4624: PetscFunctionReturn(PETSC_SUCCESS);
4625: }
4627: /*@C
4628:   MatSolverTypeGet - Gets the function that creates the factor matrix if it exists
4630:   Input Parameters:
4631: + type - name of the package, for example `petsc` or `superlu`; if this is `NULL`, then the first result that satisfies the other criteria is returned
4632: . ftype - the type of factorization supported by the type
4633: - mtype - the matrix type that works with this type
4635: Output Parameters:
4636: + foundtype - `PETSC_TRUE` if the type was registered
4637: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4638: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4640: Calling sequence of `createfactor`:
4641: + A - the matrix providing the factor matrix
4642: . ftype - the `MatFactorType` of the factor requested
4643: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4645: Level: developer
4647: Note:
4648: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4649: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4650: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4652: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4653: `MatInitializePackage()`
4654: @*/
4655: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
4656: {
4657: MatSolverTypeHolder next = MatSolverTypeHolders;
4658: PetscBool flg;
4659: MatSolverTypeForSpecifcType inext;
4661: PetscFunctionBegin;
4662: if (foundtype) *foundtype = PETSC_FALSE;
4663: if (foundmtype) *foundmtype = PETSC_FALSE;
4664: if (createfactor) *createfactor = NULL;
4666: if (type) {
4667: while (next) {
4668: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4669: if (flg) {
4670: if (foundtype) *foundtype = PETSC_TRUE;
4671: inext = next->handlers;
4672: while (inext) {
4673: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4674: if (flg) {
4675: if (foundmtype) *foundmtype = PETSC_TRUE;
4676: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4677: PetscFunctionReturn(PETSC_SUCCESS);
4678: }
4679: inext = inext->next;
4680: }
4681: }
4682: next = next->next;
4683: }
4684: } else {
4685: while (next) {
4686: inext = next->handlers;
4687: while (inext) {
4688: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4689: if (flg && inext->createfactor[(int)ftype - 1]) {
4690: if (foundtype) *foundtype = PETSC_TRUE;
4691: if (foundmtype) *foundmtype = PETSC_TRUE;
4692: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4693: PetscFunctionReturn(PETSC_SUCCESS);
4694: }
4695: inext = inext->next;
4696: }
4697: next = next->next;
4698: }
4699: /* try with base classes inext->mtype */
4700: next = MatSolverTypeHolders;
4701: while (next) {
4702: inext = next->handlers;
4703: while (inext) {
4704: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4705: if (flg && inext->createfactor[(int)ftype - 1]) {
4706: if (foundtype) *foundtype = PETSC_TRUE;
4707: if (foundmtype) *foundmtype = PETSC_TRUE;
4708: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4709: PetscFunctionReturn(PETSC_SUCCESS);
4710: }
4711: inext = inext->next;
4712: }
4713: next = next->next;
4714: }
4715: }
4716: PetscFunctionReturn(PETSC_SUCCESS);
4717: }
4719: PetscErrorCode MatSolverTypeDestroy(void)
4720: {
4721: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4722: MatSolverTypeForSpecifcType inext, iprev;
4724: PetscFunctionBegin;
4725: while (next) {
4726: PetscCall(PetscFree(next->name));
4727: inext = next->handlers;
4728: while (inext) {
4729: PetscCall(PetscFree(inext->mtype));
4730: iprev = inext;
4731: inext = inext->next;
4732: PetscCall(PetscFree(iprev));
4733: }
4734: prev = next;
4735: next = next->next;
4736: PetscCall(PetscFree(prev));
4737: }
4738: MatSolverTypeHolders = NULL;
4739: PetscFunctionReturn(PETSC_SUCCESS);
4740: }
4742: /*@
4743: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4745: Logically Collective
4747: Input Parameter:
4748: . mat - the matrix
4750: Output Parameter:
4751: . flg - `PETSC_TRUE` if uses the ordering
4753: Level: developer
4755: Note:
4756: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4757: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4759: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4760: @*/
4761: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4762: {
4763: PetscFunctionBegin;
4764: *flg = mat->canuseordering;
4765: PetscFunctionReturn(PETSC_SUCCESS);
4766: }
4768: /*@
4769: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4771: Logically Collective
4773: Input Parameters:
4774: + mat - the matrix obtained with `MatGetFactor()`
4775: - ftype - the factorization type to be used
4777: Output Parameter:
4778: . otype - the preferred ordering type
4780: Level: developer
4782: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4783: @*/
4784: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4785: {
4786: PetscFunctionBegin;
4787: *otype = mat->preferredordering[ftype];
4788: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4789: PetscFunctionReturn(PETSC_SUCCESS);
4790: }
4792: /*@
4793:   MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic,Numeric()
4795: Collective
4797: Input Parameters:
4798: + mat - the matrix
4799: . type - name of solver type, for example, `superlu`, `petsc` (to use PETSc's solver if it is available); if this is `NULL`, then the first result that satisfies
4800:          the other criteria is returned
4801: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4803: Output Parameter:
4804: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4806: Options Database Keys:
4807: + -pc_factor_mat_solver_type <type> - choose the type at run time. When using `KSP` solvers
4808: . -pc_factor_mat_factor_on_host <bool> - do mat factorization on host (with device matrices). Default is doing it on device
4809: - -pc_factor_mat_solve_on_host <bool> - do mat solve on host (with device matrices). Default is doing it on device
4811: Level: intermediate
4813: Notes:
4814: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4815:   types registered with `MatSolverTypeRegister()` can only be fully checked at runtime.
4817: Users usually access the factorization solvers via `KSP`
4819: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4820:   such as pastix, superlu, mumps etc. PETSc must have been ./configure'd to use the external solver, using the option --download-package or --with-package-dir
4822: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4823: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4824: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4826:   Some of the packages have options for controlling the factorization; these are in the form -prefix_mat_packagename_packageoption
4827:   where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly, one can instead
4828:   call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4830: Developer Note:
4831: This should actually be called `MatCreateFactor()` since it creates a new factor object
4833: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4834: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`
4835: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4836: @*/
4837: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4838: {
4839: PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
4840: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4842: PetscFunctionBegin;
4846: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4847: MatCheckPreallocated(mat, 1);
4849: PetscCall(MatIsShell(mat, &shell));
4850: if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
4851: if (hasop) {
4852: PetscUseTypeMethod(mat, getfactor, type, ftype, f);
4853: PetscFunctionReturn(PETSC_SUCCESS);
4854: }
4856: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4857: if (!foundtype) {
4858: if (type) {
4859: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4860: ((PetscObject)mat)->type_name, type);
4861: } else {
4862: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4863: }
4864: }
4865: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4866: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4868: PetscCall((*conv)(mat, ftype, f));
4869: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4870: PetscFunctionReturn(PETSC_SUCCESS);
4871: }
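/*
   Example usage (a sketch; A is assumed to be an assembled square AIJ matrix and
   b, x conforming vectors): the typical LU workflow built on MatGetFactor(),
   MatGetOrdering(), and the MatLUFactorSymbolic()/MatLUFactorNumeric() pair.

     Mat           F;
     IS            rowperm, colperm;
     MatFactorInfo info;

     PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
     PetscCall(MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm));
     PetscCall(MatFactorInfoInitialize(&info));
     PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
     PetscCall(MatLUFactorNumeric(F, A, &info));
     PetscCall(MatSolve(F, b, x));
     PetscCall(ISDestroy(&rowperm));
     PetscCall(ISDestroy(&colperm));
     PetscCall(MatDestroy(&F));
*/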
4873: /*@
4874:   MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver type and factor type
4876: Not Collective
4878: Input Parameters:
4879: + mat - the matrix
4880: . type - name of solver type, for example, `superlu`, `petsc` (to use PETSc's default)
4881: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4883: Output Parameter:
4884: . flg - `PETSC_TRUE` if the factorization is available
4886: Level: intermediate
4888: Notes:
4889: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4890: such as pastix, superlu, mumps etc.
4892:   PETSc must have been ./configure'd to use the external solver, using the option --download-package
4894: Developer Note:
4895: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4897: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4898: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4899: @*/
4900: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4901: {
4902: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4904: PetscFunctionBegin;
4906: PetscAssertPointer(flg, 4);
4908: *flg = PETSC_FALSE;
4909: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
4911: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4912: MatCheckPreallocated(mat, 1);
4914: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4915: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4916: PetscFunctionReturn(PETSC_SUCCESS);
4917: }
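/*
   Example usage (a sketch; A is assumed assembled, and MATSOLVERSUPERLU_DIST is used
   only as an illustration of an external package): fall back to the PETSc solver
   when the external package was not configured.

     PetscBool     flg;
     MatSolverType stype = MATSOLVERSUPERLU_DIST;
     Mat           F;

     PetscCall(MatGetFactorAvailable(A, stype, MAT_FACTOR_LU, &flg));
     if (!flg) stype = MATSOLVERPETSC;
     PetscCall(MatGetFactor(A, stype, MAT_FACTOR_LU, &F));
*/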
4919: /*@
4920: MatDuplicate - Duplicates a matrix including the non-zero structure.
4922: Collective
4924: Input Parameters:
4925: + mat - the matrix
4926: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4927: See the manual page for `MatDuplicateOption()` for an explanation of these options.
4929: Output Parameter:
4930: . M - pointer to place new matrix
4932: Level: intermediate
4934: Notes:
4935: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
4937: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
4939: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4941:   When the original matrix `mat` is the product of a matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4942:   is duplicated; the internal data structures created for the reuse of previous matrix operations are not duplicated.
4943:   Users should not use `MatDuplicate()` to create a new matrix `M` if `M` is intended to be reused as the product of a matrix operation.
4945: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4946: @*/
4947: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4948: {
4949: Mat B;
4950: VecType vtype;
4951: PetscInt i;
4952: PetscObject dm, container_h, container_d;
4953: void (*viewf)(void);
4955: PetscFunctionBegin;
4958: PetscAssertPointer(M, 3);
4959: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4960: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4961: MatCheckPreallocated(mat, 1);
4963: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4964: PetscUseTypeMethod(mat, duplicate, op, M);
4965: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4966: B = *M;
4968: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4969: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4970: PetscCall(MatGetVecType(mat, &vtype));
4971: PetscCall(MatSetVecType(B, vtype));
4973: B->stencil.dim = mat->stencil.dim;
4974: B->stencil.noc = mat->stencil.noc;
4975: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4976: B->stencil.dims[i] = mat->stencil.dims[i];
4977: B->stencil.starts[i] = mat->stencil.starts[i];
4978: }
4980: B->nooffproczerorows = mat->nooffproczerorows;
4981: B->nooffprocentries = mat->nooffprocentries;
4983: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
4984: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
4985: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
4986: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
4987: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
4988: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
4989: if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
4990: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4991: PetscFunctionReturn(PETSC_SUCCESS);
4992: }
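/*
   Example usage (a sketch; A is assumed assembled): make an independent copy,
   including the numerical values, and modify it without touching A.

     Mat B;
     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &B));
     PetscCall(MatScale(B, 2.0));
     PetscCall(MatDestroy(&B));
*/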
4994: /*@
4995: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
4997: Logically Collective
4999: Input Parameter:
5000: . mat - the matrix
5002: Output Parameter:
5003: . v - the diagonal of the matrix
5005: Level: intermediate
5007: Note:
5008: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
5009: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
5010: is larger than `ndiag`, the values of the remaining entries are unspecified.
5012: Currently only correct in parallel for square matrices.
5014: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
5015: @*/
5016: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
5017: {
5018: PetscFunctionBegin;
5022: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5023: MatCheckPreallocated(mat, 1);
5024: if (PetscDefined(USE_DEBUG)) {
5025: PetscInt nv, row, col, ndiag;
5027: PetscCall(VecGetLocalSize(v, &nv));
5028: PetscCall(MatGetLocalSize(mat, &row, &col));
5029: ndiag = PetscMin(row, col);
5030: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5031: }
5033: PetscUseTypeMethod(mat, getdiagonal, v);
5034: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5035: PetscFunctionReturn(PETSC_SUCCESS);
5036: }
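/*
   Example usage (a sketch; A is assumed to be an assembled square matrix): create a
   conforming vector with MatCreateVecs() and extract the diagonal into it.

     Vec diag;
     PetscCall(MatCreateVecs(A, &diag, NULL));
     PetscCall(MatGetDiagonal(A, diag));
     PetscCall(VecDestroy(&diag));
*/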
5038: /*@
5039: MatGetRowMin - Gets the minimum value (of the real part) of each
5040: row of the matrix
5042: Logically Collective
5044: Input Parameter:
5045: . mat - the matrix
5047: Output Parameters:
5048: + v - the vector for storing the minimums
5049: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5051: Level: intermediate
5053: Note:
5054:   The results of this call are the same as if one converted the matrix to dense format
5055: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5057: This code is only implemented for a couple of matrix formats.
5059: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5060: `MatGetRowMax()`
5061: @*/
5062: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5063: {
5064: PetscFunctionBegin;
5068: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5070: if (!mat->cmap->N) {
5071: PetscCall(VecSet(v, PETSC_MAX_REAL));
5072: if (idx) {
5073: PetscInt i, m = mat->rmap->n;
5074: for (i = 0; i < m; i++) idx[i] = -1;
5075: }
5076: } else {
5077: MatCheckPreallocated(mat, 1);
5078: }
5079: PetscUseTypeMethod(mat, getrowmin, v, idx);
5080: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5081: PetscFunctionReturn(PETSC_SUCCESS);
5082: }
5084: /*@
5085: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5086: row of the matrix
5088: Logically Collective
5090: Input Parameter:
5091: . mat - the matrix
5093: Output Parameters:
5094: + v - the vector for storing the minimums
5095: - idx - the indices of the column found for each row (or `NULL` if not needed)
5097: Level: intermediate
5099: Notes:
5100:   If a row is completely empty or has only 0.0 values, then the `idx` value for that
5101: row is 0 (the first column).
5103: This code is only implemented for a couple of matrix formats.
5105: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5106: @*/
5107: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5108: {
5109: PetscFunctionBegin;
5113: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5114: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5116: if (!mat->cmap->N) {
5117: PetscCall(VecSet(v, 0.0));
5118: if (idx) {
5119: PetscInt i, m = mat->rmap->n;
5120: for (i = 0; i < m; i++) idx[i] = -1;
5121: }
5122: } else {
5123: MatCheckPreallocated(mat, 1);
5124: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5125: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5126: }
5127: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5128: PetscFunctionReturn(PETSC_SUCCESS);
5129: }
5131: /*@
5132: MatGetRowMax - Gets the maximum value (of the real part) of each
5133: row of the matrix
5135: Logically Collective
5137: Input Parameter:
5138: . mat - the matrix
5140: Output Parameters:
5141: + v - the vector for storing the maximums
5142: - idx - the indices of the column found for each row (optional, otherwise pass `NULL`)
5144: Level: intermediate
5146: Notes:
5147:   The results of this call are the same as if one converted the matrix to dense format
5148:   and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
5150: This code is only implemented for a couple of matrix formats.
5152: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5153: @*/
5154: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5155: {
5156: PetscFunctionBegin;
5160: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5162: if (!mat->cmap->N) {
5163: PetscCall(VecSet(v, PETSC_MIN_REAL));
5164: if (idx) {
5165: PetscInt i, m = mat->rmap->n;
5166: for (i = 0; i < m; i++) idx[i] = -1;
5167: }
5168: } else {
5169: MatCheckPreallocated(mat, 1);
5170: PetscUseTypeMethod(mat, getrowmax, v, idx);
5171: }
5172: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5173: PetscFunctionReturn(PETSC_SUCCESS);
5174: }
5176: /*@
5177: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5178: row of the matrix
5180: Logically Collective
5182: Input Parameter:
5183: . mat - the matrix
5185: Output Parameters:
5186: + v - the vector for storing the maximums
5187: - idx - the indices of the column found for each row (or `NULL` if not needed)
5189: Level: intermediate
5191: Notes:
5192:   If a row is completely empty or has only 0.0 values, then the `idx` value for that
5193: row is 0 (the first column).
5195: This code is only implemented for a couple of matrix formats.
5197: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5198: @*/
5199: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5200: {
5201: PetscFunctionBegin;
5205: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5207: if (!mat->cmap->N) {
5208: PetscCall(VecSet(v, 0.0));
5209: if (idx) {
5210: PetscInt i, m = mat->rmap->n;
5211: for (i = 0; i < m; i++) idx[i] = -1;
5212: }
5213: } else {
5214: MatCheckPreallocated(mat, 1);
5215: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5216: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5217: }
5218: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5219: PetscFunctionReturn(PETSC_SUCCESS);
5220: }
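/*
   Example usage (a sketch; A is assumed assembled): store the largest absolute value
   of each row in a left vector and record the column where it was found.

     Vec      rowmax;
     PetscInt m, *cols;

     PetscCall(MatGetLocalSize(A, &m, NULL));
     PetscCall(MatCreateVecs(A, NULL, &rowmax));   left vector, one entry per local row
     PetscCall(PetscMalloc1(m, &cols));
     PetscCall(MatGetRowMaxAbs(A, rowmax, cols));
     PetscCall(PetscFree(cols));
     PetscCall(VecDestroy(&rowmax));
*/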
5222: /*@
5223: MatGetRowSumAbs - Gets the sum value (in absolute value) of each row of the matrix
5225: Logically Collective
5227: Input Parameter:
5228: . mat - the matrix
5230: Output Parameter:
5231: . v - the vector for storing the sum
5233: Level: intermediate
5235: This code is only implemented for a couple of matrix formats.
5237: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5238: @*/
5239: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5240: {
5241: PetscFunctionBegin;
5245: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5247: if (!mat->cmap->N) {
5248: PetscCall(VecSet(v, 0.0));
5249: } else {
5250: MatCheckPreallocated(mat, 1);
5251: PetscUseTypeMethod(mat, getrowsumabs, v);
5252: }
5253: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5254: PetscFunctionReturn(PETSC_SUCCESS);
5255: }
5257: /*@
5258: MatGetRowSum - Gets the sum of each row of the matrix
5260: Logically or Neighborhood Collective
5262: Input Parameter:
5263: . mat - the matrix
5265: Output Parameter:
5266: . v - the vector for storing the sum of rows
5268: Level: intermediate
5270: Note:
5271:   This code is slow since it is not currently specialized for different matrix formats.
5273: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5274: @*/
5275: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5276: {
5277: Vec ones;
5279: PetscFunctionBegin;
5283: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5284: MatCheckPreallocated(mat, 1);
5285: PetscCall(MatCreateVecs(mat, &ones, NULL));
5286: PetscCall(VecSet(ones, 1.));
5287: PetscCall(MatMult(mat, ones, v));
5288: PetscCall(VecDestroy(&ones));
5289: PetscFunctionReturn(PETSC_SUCCESS);
5290: }
5292: /*@
5293:   MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data in a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B),
5294:   when `B` was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5296: Collective
5298: Input Parameter:
5299: . mat - the matrix to provide the transpose
5301: Output Parameter:
5302: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5304: Level: advanced
5306: Note:
5307: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5308: routine allows bypassing that call.
5310: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5311: @*/
5312: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5313: {
5314: MatParentState *rb = NULL;
5316: PetscFunctionBegin;
5317: PetscCall(PetscNew(&rb));
5318: rb->id = ((PetscObject)mat)->id;
5319: rb->state = 0;
5320: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5321: PetscCall(PetscObjectContainerCompose((PetscObject)B, "MatTransposeParent", rb, PetscCtxDestroyDefault));
5322: PetscFunctionReturn(PETSC_SUCCESS);
5323: }
5325: /*@
5326: MatTranspose - Computes the transpose of a matrix, either in-place or out-of-place.
5328: Collective
5330: Input Parameters:
5331: + mat - the matrix to transpose
5332: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5334: Output Parameter:
5335: . B - the transpose of the matrix
5337: Level: intermediate
5339: Notes:
5340: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5342: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX` to store the transpose. If you already have a matrix to contain the
5343: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5345: If the nonzero structure of `mat` changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5347: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose but don't need the storage to be changed.
5348: For example, the result of `MatCreateTranspose()` will compute the transpose of the given matrix times a vector for matrix-vector products computed with `MatMult()`.
5350: If `mat` is unchanged from the last call this function returns immediately without recomputing the result
5352: If you only need the symbolic transpose of a matrix, and not the numerical values, use `MatTransposeSymbolic()`
5354: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5355: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5356: @*/
5357: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5358: {
5359: PetscContainer rB = NULL;
5360: MatParentState *rb = NULL;
5362: PetscFunctionBegin;
5365: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5366: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5367: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5368: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5369: MatCheckPreallocated(mat, 1);
5370: if (reuse == MAT_REUSE_MATRIX) {
5371: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5372: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5373: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5374: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5375: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5376: }
5378: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5379: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5380: PetscUseTypeMethod(mat, transpose, reuse, B);
5381: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5382: }
5383: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5385: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5386: if (reuse != MAT_INPLACE_MATRIX) {
5387: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5388: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5389: rb->state = ((PetscObject)mat)->state;
5390: rb->nonzerostate = mat->nonzerostate;
5391: }
5392: PetscFunctionReturn(PETSC_SUCCESS);
5393: }
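/*
   Example usage (a sketch; A is assumed assembled): build the transpose once, then
   refresh its numerical values after A changes, keeping the same nonzero structure.

     Mat At;
     PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &At));
     ... change the values of A without changing its nonzero pattern ...
     PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &At));
     PetscCall(MatDestroy(&At));
*/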
5395: /*@
5396: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5398: Collective
5400: Input Parameter:
5401: . A - the matrix to transpose
5403: Output Parameter:
5404: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5405: numerical portion.
5407: Level: intermediate
5409: Note:
5410: This is not supported for many matrix types, use `MatTranspose()` in those cases
5412: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5413: @*/
5414: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5415: {
5416: PetscFunctionBegin;
5419: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5420: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5421: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5422: PetscUseTypeMethod(A, transposesymbolic, B);
5423: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5425: PetscCall(MatTransposeSetPrecursor(A, *B));
5426: PetscFunctionReturn(PETSC_SUCCESS);
5427: }
5429: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5430: {
5431: PetscContainer rB;
5432: MatParentState *rb;
5434: PetscFunctionBegin;
5437: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5438: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5439: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5440: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5441: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5442: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5443: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5444: PetscFunctionReturn(PETSC_SUCCESS);
5445: }
5447: /*@
5448: MatIsTranspose - Test whether a matrix is another one's transpose,
5449: or its own, in which case it tests symmetry.
5451: Collective
5453: Input Parameters:
5454: + A - the matrix to test
5455: . B - the matrix to test against, this can equal the first parameter
5456: - tol - tolerance, differences between entries smaller than this are counted as zero
5458: Output Parameter:
5459: . flg - the result
5461: Level: intermediate
5463: Notes:
5464: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5465: test involves parallel copies of the block off-diagonal parts of the matrix.
5467: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5468: @*/
5469: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5470: {
5471: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5473: PetscFunctionBegin;
5476: PetscAssertPointer(flg, 4);
5477: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5478: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5479: *flg = PETSC_FALSE;
5480: if (f && g) {
5481: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5482: PetscCall((*f)(A, B, tol, flg));
5483: } else {
5484: MatType mattype;
5486: PetscCall(MatGetType(f ? B : A, &mattype));
5487: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5488: }
5489: PetscFunctionReturn(PETSC_SUCCESS);
5490: }
5492: /*@
5493:   MatHermitianTranspose - Computes the Hermitian (complex conjugate) transpose of a matrix, either in-place or out-of-place.
5495: Collective
5497: Input Parameters:
5498: + mat - the matrix to transpose and complex conjugate
5499: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5501: Output Parameter:
5502: . B - the Hermitian transpose
5504: Level: intermediate
5506: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5507: @*/
5508: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5509: {
5510: PetscFunctionBegin;
5511: PetscCall(MatTranspose(mat, reuse, B));
5512: #if defined(PETSC_USE_COMPLEX)
5513: PetscCall(MatConjugate(*B));
5514: #endif
5515: PetscFunctionReturn(PETSC_SUCCESS);
5516: }
5518: /*@
5519:   MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose.
5521: Collective
5523: Input Parameters:
5524: + A - the matrix to test
5525: . B - the matrix to test against, this can equal the first parameter
5526: - tol - tolerance, differences between entries smaller than this are counted as zero
5528: Output Parameter:
5529: . flg - the result
5531: Level: intermediate
5533: Notes:
5534: Only available for `MATAIJ` matrices.
5536: The sequential algorithm
5537: has a running time of the order of the number of nonzeros; the parallel
5538: test involves parallel copies of the block off-diagonal parts of the matrix.
5540: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5541: @*/
5542: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5543: {
5544: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5546: PetscFunctionBegin;
5549: PetscAssertPointer(flg, 4);
5550: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5551: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5552: if (f && g) {
5553: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5554: PetscCall((*f)(A, B, tol, flg));
5555: }
5556: PetscFunctionReturn(PETSC_SUCCESS);
5557: }
5559: /*@
5560: MatPermute - Creates a new matrix with rows and columns permuted from the
5561: original.
5563: Collective
5565: Input Parameters:
5566: + mat - the matrix to permute
5567: . row - row permutation, each processor supplies only the permutation for its rows
5568: - col - column permutation, each processor supplies only the permutation for its columns
5570: Output Parameter:
5571: . B - the permuted matrix
5573: Level: advanced
5575: Note:
5576: The index sets map from row/col of permuted matrix to row/col of original matrix.
5577: The index sets should be on the same communicator as mat and have the same local sizes.
5579: Developer Note:
5580: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5581: exploit the fact that row and col are permutations, consider implementing the
5582: more general `MatCreateSubMatrix()` instead.
5584: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5585: @*/
5586: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5587: {
5588: PetscFunctionBegin;
5593: PetscAssertPointer(B, 4);
5594: PetscCheckSameComm(mat, 1, row, 2);
5595: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5596: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5597: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5598: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5599: MatCheckPreallocated(mat, 1);
5601: if (mat->ops->permute) {
5602: PetscUseTypeMethod(mat, permute, row, col, B);
5603: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5604: } else {
5605: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5606: }
5607: PetscFunctionReturn(PETSC_SUCCESS);
5608: }
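/*
   Usage sketch (illustrative): symmetrically permute an assembled Mat A using a
   reverse Cuthill-McKee ordering obtained from MatGetOrdering().

.vb
   IS  rperm, cperm;
   Mat B;

   PetscCall(MatGetOrdering(A, MATORDERINGRCM, &rperm, &cperm));
   PetscCall(MatPermute(A, rperm, cperm, &B));
   PetscCall(ISDestroy(&rperm));
   PetscCall(ISDestroy(&cperm));
.ve
*/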
5610: /*@
5611: MatEqual - Compares two matrices.
5613: Collective
5615: Input Parameters:
5616: + A - the first matrix
5617: - B - the second matrix
5619: Output Parameter:
5620: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5622: Level: intermediate
5624: Note:
5625: If either of the matrices is "matrix-free", meaning its entries are not stored explicitly, then equality is determined by comparing the results of several matrix-vector products
5626: using several randomly created vectors, see `MatMultEqual()`.
5628: .seealso: [](ch_matrices), `Mat`, `MatMultEqual()`
5629: @*/
5630: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5631: {
5632: PetscFunctionBegin;
5637: PetscAssertPointer(flg, 3);
5638: PetscCheckSameComm(A, 1, B, 2);
5639: MatCheckPreallocated(A, 1);
5640: MatCheckPreallocated(B, 2);
5641: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5642: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5643: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5644: B->cmap->N);
5645: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5646: PetscUseTypeMethod(A, equal, B, flg);
5647: } else {
5648: PetscCall(MatMultEqual(A, B, 10, flg));
5649: }
5650: PetscFunctionReturn(PETSC_SUCCESS);
5651: }
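/*
   Usage sketch (illustrative): compare two assembled matrices A and B with the same
   global dimensions; for matrix-free operators the comparison falls back to MatMultEqual().

.vb
   PetscBool equal;

   PetscCall(MatEqual(A, B, &equal));
   if (!equal) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "A and B differ\n"));
.ve
*/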
5653: /*@
5654: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5655: matrices that are stored as vectors. Either of the two scaling
5656: matrices can be `NULL`.
5658: Collective
5660: Input Parameters:
5661: + mat - the matrix to be scaled
5662: . l - the left scaling vector (or `NULL`)
5663: - r - the right scaling vector (or `NULL`)
5665: Level: intermediate
5667: Note:
5668: `MatDiagonalScale()` computes $A = LAR$, where
5669: $L$ is a diagonal matrix (stored as the vector `l`) and $R$ is a diagonal matrix (stored as the vector `r`).
5670: $L$ scales the rows of the matrix and $R$ scales its columns.
5672: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5673: @*/
5674: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5675: {
5676: PetscFunctionBegin;
5679: if (l) {
5681: PetscCheckSameComm(mat, 1, l, 2);
5682: }
5683: if (r) {
5685: PetscCheckSameComm(mat, 1, r, 3);
5686: }
5687: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5688: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5689: MatCheckPreallocated(mat, 1);
5690: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5692: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5693: PetscUseTypeMethod(mat, diagonalscale, l, r);
5694: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5695: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5696: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5697: PetscFunctionReturn(PETSC_SUCCESS);
5698: }
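/*
   Usage sketch (illustrative): scale the rows of an assembled Mat A by 2 and its
   columns by 1/2, that is A <- diag(l) A diag(r).

.vb
   Vec l, r;

   PetscCall(MatCreateVecs(A, &r, &l));   // r is compatible with the columns, l with the rows
   PetscCall(VecSet(l, 2.0));
   PetscCall(VecSet(r, 0.5));
   PetscCall(MatDiagonalScale(A, l, r));
   PetscCall(VecDestroy(&l));
   PetscCall(VecDestroy(&r));
.ve
*/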
5700: /*@
5701: MatScale - Scales all elements of a matrix by a given number.
5703: Logically Collective
5705: Input Parameters:
5706: + mat - the matrix to be scaled
5707: - a - the scaling value
5709: Level: intermediate
5711: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5712: @*/
5713: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5714: {
5715: PetscFunctionBegin;
5718: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5719: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5721: MatCheckPreallocated(mat, 1);
5723: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5724: if (a != (PetscScalar)1.0) {
5725: PetscUseTypeMethod(mat, scale, a);
5726: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5727: }
5728: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5729: PetscFunctionReturn(PETSC_SUCCESS);
5730: }
5732: /*@
5733: MatNorm - Calculates various norms of a matrix.
5735: Collective
5737: Input Parameters:
5738: + mat - the matrix
5739: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5741: Output Parameter:
5742: . nrm - the resulting norm
5744: Level: intermediate
5746: .seealso: [](ch_matrices), `Mat`
5747: @*/
5748: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5749: {
5750: PetscFunctionBegin;
5753: PetscAssertPointer(nrm, 3);
5755: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5756: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5757: MatCheckPreallocated(mat, 1);
5759: PetscUseTypeMethod(mat, norm, type, nrm);
5760: PetscFunctionReturn(PETSC_SUCCESS);
5761: }
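/*
   Usage sketch (illustrative): compute two different norms of an assembled Mat A.

.vb
   PetscReal frob, infty;

   PetscCall(MatNorm(A, NORM_FROBENIUS, &frob));
   PetscCall(MatNorm(A, NORM_INFINITY, &infty));
.ve
*/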
5763: /*
5764: This variable is used to prevent counting calls to MatAssemblyBegin() that
5765: are made from within a MatAssemblyEnd().
5766: */
5767: static PetscInt MatAssemblyEnd_InUse = 0;
5768: /*@
5769: MatAssemblyBegin - Begins assembling the matrix. This routine should
5770: be called after completing all calls to `MatSetValues()`.
5772: Collective
5774: Input Parameters:
5775: + mat - the matrix
5776: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5778: Level: beginner
5780: Notes:
5781: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5782: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5784: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5785: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5786: using the matrix.
5788: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5789: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5790: a global collective operation requiring all processes that share the matrix.
5792: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine are compressed
5793: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5794: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
5796: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5797: @*/
5798: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5799: {
5800: PetscFunctionBegin;
5803: MatCheckPreallocated(mat, 1);
5804: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5805: if (mat->assembled) {
5806: mat->was_assembled = PETSC_TRUE;
5807: mat->assembled = PETSC_FALSE;
5808: }
5810: if (!MatAssemblyEnd_InUse) {
5811: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5812: PetscTryTypeMethod(mat, assemblybegin, type);
5813: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5814: } else PetscTryTypeMethod(mat, assemblybegin, type);
5815: PetscFunctionReturn(PETSC_SUCCESS);
5816: }
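/*
   Usage sketch (illustrative): flush-assemble when switching from ADD_VALUES to
   INSERT_VALUES, then final-assemble before using the matrix; i, j and v are
   placeholder indices and a placeholder value supplied by the application.

.vb
   PetscCall(MatSetValues(A, 1, &i, 1, &j, &v, ADD_VALUES));
   PetscCall(MatAssemblyBegin(A, MAT_FLUSH_ASSEMBLY));
   PetscCall(MatAssemblyEnd(A, MAT_FLUSH_ASSEMBLY));
   PetscCall(MatSetValues(A, 1, &i, 1, &j, &v, INSERT_VALUES));
   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve
*/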
5818: /*@
5819: MatAssembled - Indicates if a matrix has been assembled and is ready for
5820: use; for example, in matrix-vector product.
5822: Not Collective
5824: Input Parameter:
5825: . mat - the matrix
5827: Output Parameter:
5828: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5830: Level: advanced
5832: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5833: @*/
5834: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5835: {
5836: PetscFunctionBegin;
5838: PetscAssertPointer(assembled, 2);
5839: *assembled = mat->assembled;
5840: PetscFunctionReturn(PETSC_SUCCESS);
5841: }
5843: /*@
5844: MatAssemblyEnd - Completes assembling the matrix. This routine should
5845: be called after `MatAssemblyBegin()`.
5847: Collective
5849: Input Parameters:
5850: + mat - the matrix
5851: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5853: Options Database Keys:
5854: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5855: . -mat_view ::ascii_info_detail - Prints more detailed info
5856: . -mat_view - Prints matrix in ASCII format
5857: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5858: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5859: . -display <name> - Sets display name (default is host)
5860: . -draw_pause <sec> - Sets number of seconds to pause after display
5861: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5862: . -viewer_socket_machine <machine> - Machine to use for socket
5863: . -viewer_socket_port <port> - Port number to use for socket
5864: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5866: Level: beginner
5868: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5869: @*/
5870: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5871: {
5872: static PetscInt inassm = 0;
5873: PetscBool flg = PETSC_FALSE;
5875: PetscFunctionBegin;
5879: inassm++;
5880: MatAssemblyEnd_InUse++;
5881: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5882: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5883: PetscTryTypeMethod(mat, assemblyend, type);
5884: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5885: } else PetscTryTypeMethod(mat, assemblyend, type);
5887: /* Flush assembly is not a true assembly */
5888: if (type != MAT_FLUSH_ASSEMBLY) {
5889: if (mat->num_ass) {
5890: if (!mat->symmetry_eternal) {
5891: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5892: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5893: }
5894: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5895: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5896: }
5897: mat->num_ass++;
5898: mat->assembled = PETSC_TRUE;
5899: mat->ass_nonzerostate = mat->nonzerostate;
5900: }
5902: mat->insertmode = NOT_SET_VALUES;
5903: MatAssemblyEnd_InUse--;
5904: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5905: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5906: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5908: if (mat->checksymmetryonassembly) {
5909: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5910: if (flg) {
5911: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5912: } else {
5913: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5914: }
5915: }
5916: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5917: }
5918: inassm--;
5919: PetscFunctionReturn(PETSC_SUCCESS);
5920: }
5922: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5923: /*@
5924: MatSetOption - Sets a parameter option for a matrix. Some options
5925: may be specific to certain storage formats. Some options
5926: determine how values will be inserted (or added). Sorted,
5927: row-oriented input will generally assemble the fastest. The default
5928: is row-oriented.
5930: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5932: Input Parameters:
5933: + mat - the matrix
5934: . op - the option, one of those listed below (and possibly others),
5935: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5937: Options Describing Matrix Structure:
5938: + `MAT_SPD` - symmetric positive definite
5939: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5940: . `MAT_HERMITIAN` - transpose is the complex conjugation
5941: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5942: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5943: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5944: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5946: These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that these properties
5947: do not need to be computed (usually at a high cost).
5949: Options For Use with `MatSetValues()`:
5950: Insert a logically dense subblock, which can be
5951: . `MAT_ROW_ORIENTED` - row-oriented (default)
5953: These options reflect the data you pass in with `MatSetValues()`; they have
5954: nothing to do with how the data is stored internally in the matrix
5955: data structure.
5957: When (re)assembling a matrix, we can restrict the input for
5958: efficiency/debugging purposes. These options include
5959: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5960: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5961: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5962: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5963: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5964: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5965: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5966: performance for very large process counts.
5967: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5968: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5969: functions, instead sending only neighbor messages.
5971: Level: intermediate
5973: Notes:
5974: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED`, all processes that share the matrix must pass the same value in `flg`!
5976: Some options are relevant only for particular matrix types and
5977: are thus ignored by others. Other options are not supported by
5978: certain matrix types and will generate an error message if set.
5980: If using Fortran to compute a matrix, one may need to
5981: use the column-oriented option (or convert to the row-oriented
5982: format).
5984: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
5985: that would generate a new entry in the nonzero structure is instead
5986: ignored. Thus, if memory has not already been allocated for this particular
5987: data, then the insertion is ignored. For dense matrices, in which
5988: the entire array is allocated, no entries are ever ignored.
5989: Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one fewer global reduction
5991: `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5992: that would generate a new entry in the nonzero structure instead produces
5993: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one fewer global reduction
5995: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5996: that would generate a new entry that has not been preallocated will
5997: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
5998: only.) This is a useful flag when debugging matrix memory preallocation.
5999: If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one fewer global reduction
6001: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
6002: other processors should be dropped, rather than stashed.
6003: This is useful if you know that the "owning" processor is also
6004: always generating the correct matrix entries, so that PETSc need
6005: not transfer duplicate entries generated on another processor.
6007: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6008: searches during matrix assembly. When this flag is set, the hash table
6009: is created during the first matrix assembly. This hash table is
6010: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6011: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
6012: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
6013: supported by `MATMPIBAIJ` format only.
6015: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6016: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6018: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6019: a zero location in the matrix
6021: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6023: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6024: zero row routines and thus improves performance for very large process counts.
6026: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6027: part of the matrix (since they should match the upper triangular part).
6029: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6030: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6031: with finite difference schemes with non-periodic boundary conditions.
6033: Developer Note:
6034: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6035: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6036: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6037: not changed.
6039: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6040: @*/
6041: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
6042: {
6043: PetscFunctionBegin;
6045: if (op > 0) {
6048: }
6050: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Option %d is out of range", (int)op);
6052: switch (op) {
6053: case MAT_FORCE_DIAGONAL_ENTRIES:
6054: mat->force_diagonals = flg;
6055: PetscFunctionReturn(PETSC_SUCCESS);
6056: case MAT_NO_OFF_PROC_ENTRIES:
6057: mat->nooffprocentries = flg;
6058: PetscFunctionReturn(PETSC_SUCCESS);
6059: case MAT_SUBSET_OFF_PROC_ENTRIES:
6060: mat->assembly_subset = flg;
6061: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
6062: #if !defined(PETSC_HAVE_MPIUNI)
6063: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
6064: #endif
6065: mat->stash.first_assembly_done = PETSC_FALSE;
6066: }
6067: PetscFunctionReturn(PETSC_SUCCESS);
6068: case MAT_NO_OFF_PROC_ZERO_ROWS:
6069: mat->nooffproczerorows = flg;
6070: PetscFunctionReturn(PETSC_SUCCESS);
6071: case MAT_SPD:
6072: if (flg) {
6073: mat->spd = PETSC_BOOL3_TRUE;
6074: mat->symmetric = PETSC_BOOL3_TRUE;
6075: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6076: } else {
6077: mat->spd = PETSC_BOOL3_FALSE;
6078: }
6079: break;
6080: case MAT_SYMMETRIC:
6081: mat->symmetric = PetscBoolToBool3(flg);
6082: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6083: #if !defined(PETSC_USE_COMPLEX)
6084: mat->hermitian = PetscBoolToBool3(flg);
6085: #endif
6086: break;
6087: case MAT_HERMITIAN:
6088: mat->hermitian = PetscBoolToBool3(flg);
6089: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6090: #if !defined(PETSC_USE_COMPLEX)
6091: mat->symmetric = PetscBoolToBool3(flg);
6092: #endif
6093: break;
6094: case MAT_STRUCTURALLY_SYMMETRIC:
6095: mat->structurally_symmetric = PetscBoolToBool3(flg);
6096: break;
6097: case MAT_SYMMETRY_ETERNAL:
6098: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
6099: mat->symmetry_eternal = flg;
6100: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
6101: break;
6102: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6103: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
6104: mat->structural_symmetry_eternal = flg;
6105: break;
6106: case MAT_SPD_ETERNAL:
6107: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
6108: mat->spd_eternal = flg;
6109: if (flg) {
6110: mat->structural_symmetry_eternal = PETSC_TRUE;
6111: mat->symmetry_eternal = PETSC_TRUE;
6112: }
6113: break;
6114: case MAT_STRUCTURE_ONLY:
6115: mat->structure_only = flg;
6116: break;
6117: case MAT_SORTED_FULL:
6118: mat->sortedfull = flg;
6119: break;
6120: default:
6121: break;
6122: }
6123: PetscTryTypeMethod(mat, setoption, op, flg);
6124: PetscFunctionReturn(PETSC_SUCCESS);
6125: }
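/*
   Usage sketch (illustrative): declare a matrix symmetric and promise that the symmetry
   will survive later changes to its values (MAT_SYMMETRIC must be set before
   MAT_SYMMETRY_ETERNAL).

.vb
   PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));
   PetscCall(MatSetOption(A, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
.ve
*/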
6127: /*@
6128: MatGetOption - Gets a parameter option that has been set for a matrix.
6130: Logically Collective
6132: Input Parameters:
6133: + mat - the matrix
6134: - op - the option, this only responds to certain options, check the code for which ones
6136: Output Parameter:
6137: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6139: Level: intermediate
6141: Notes:
6142: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6144: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6145: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6147: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6148: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6149: @*/
6150: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6151: {
6152: PetscFunctionBegin;
6156: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Option %d is out of range", (int)op);
6157: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6159: switch (op) {
6160: case MAT_NO_OFF_PROC_ENTRIES:
6161: *flg = mat->nooffprocentries;
6162: break;
6163: case MAT_NO_OFF_PROC_ZERO_ROWS:
6164: *flg = mat->nooffproczerorows;
6165: break;
6166: case MAT_SYMMETRIC:
6167: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6168: break;
6169: case MAT_HERMITIAN:
6170: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6171: break;
6172: case MAT_STRUCTURALLY_SYMMETRIC:
6173: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6174: break;
6175: case MAT_SPD:
6176: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6177: break;
6178: case MAT_SYMMETRY_ETERNAL:
6179: *flg = mat->symmetry_eternal;
6180: break;
6181: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6182: *flg = mat->structural_symmetry_eternal;
6183: break;
6184: default:
6185: break;
6186: }
6187: PetscFunctionReturn(PETSC_SUCCESS);
6188: }
6190: /*@
6191: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6192: this routine retains the old nonzero structure.
6194: Logically Collective
6196: Input Parameter:
6197: . mat - the matrix
6199: Level: intermediate
6201: Note:
6202: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6203: See the Performance chapter of the users manual for information on preallocating matrices.
6205: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6206: @*/
6207: PetscErrorCode MatZeroEntries(Mat mat)
6208: {
6209: PetscFunctionBegin;
6212: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6213: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6214: MatCheckPreallocated(mat, 1);
6216: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6217: PetscUseTypeMethod(mat, zeroentries);
6218: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6219: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6220: PetscFunctionReturn(PETSC_SUCCESS);
6221: }
6223: /*@
6224: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6225: of a set of rows and columns of a matrix.
6227: Collective
6229: Input Parameters:
6230: + mat - the matrix
6231: . numRows - the number of rows/columns to zero
6232: . rows - the global row indices
6233: . diag - value put in the diagonal of the eliminated rows
6234: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6235: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6237: Level: intermediate
6239: Notes:
6240: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6242: For each zeroed row, the value of the corresponding `b` is set to diag times the value of the corresponding `x`.
6243: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6245: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6246: Krylov method to take advantage of the known solution on the zeroed rows.
6248: For the parallel case, all processes that share the matrix (i.e.,
6249: those in the communicator used for matrix creation) MUST call this
6250: routine, regardless of whether any rows being zeroed are owned by
6251: them.
6253: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`, it merely zeros those entries in the matrix, but never
6254: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6255: missing.
6257: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6258: list only rows local to itself).
6260: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
6262: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6263: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6264: @*/
6265: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6266: {
6267: PetscFunctionBegin;
6270: if (numRows) PetscAssertPointer(rows, 3);
6271: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6272: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6273: MatCheckPreallocated(mat, 1);
6275: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6276: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6277: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6278: PetscFunctionReturn(PETSC_SUCCESS);
6279: }
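/*
   Usage sketch (illustrative): eliminate Dirichlet boundary conditions from A x = b,
   where bcrows[] (of length nbc) holds the global indices of the constrained rows/columns,
   x already contains the known boundary values, and b is the right-hand side to be adjusted.
   bcrows, nbc, x and b are placeholders supplied by the application.

.vb
   PetscCall(MatZeroRowsColumns(A, nbc, bcrows, 1.0, x, b));
.ve
*/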
6281: /*@
6282: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6283: of a set of rows and columns of a matrix.
6285: Collective
6287: Input Parameters:
6288: + mat - the matrix
6289: . is - the rows to zero
6290: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6291: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6292: - b - optional vector of right-hand side, that will be adjusted by provided solution
6294: Level: intermediate
6296: Note:
6297: See `MatZeroRowsColumns()` for details on how this routine operates.
6299: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6300: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6301: @*/
6302: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6303: {
6304: PetscInt numRows;
6305: const PetscInt *rows;
6307: PetscFunctionBegin;
6312: PetscCall(ISGetLocalSize(is, &numRows));
6313: PetscCall(ISGetIndices(is, &rows));
6314: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6315: PetscCall(ISRestoreIndices(is, &rows));
6316: PetscFunctionReturn(PETSC_SUCCESS);
6317: }
6319: /*@
6320: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6321: of a set of rows of a matrix.
6323: Collective
6325: Input Parameters:
6326: + mat - the matrix
6327: . numRows - the number of rows to zero
6328: . rows - the global row indices
6329: . diag - value put in the diagonal of the zeroed rows
6330: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6331: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6333: Level: intermediate
6335: Notes:
6336: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6338: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6340: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6341: Krylov method to take advantage of the known solution on the zeroed rows.
6343: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns)
6344: from the matrix.
6346: Unlike `MatZeroRowsColumns()`, for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure from the eliminated rows of the matrix
6347: but does not release memory. Because of this removal, matrix-vector products with the adjusted matrix will be a bit faster. For the dense
6348: formats this does not alter the nonzero structure.
6350: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) has been set, the nonzero structure
6351: of the matrix is not changed; the values are
6352: merely zeroed.
6354: The user can set a value in the diagonal entry (or, for the `MATAIJ`
6355: format, can optionally remove the main diagonal entry from the
6356: nonzero structure as well, by passing 0.0 as the `diag` value).
6358: For the parallel case, all processes that share the matrix (i.e.,
6359: those in the communicator used for matrix creation) MUST call this
6360: routine, regardless of whether any rows being zeroed are owned by
6361: them.
6363: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6364: list only rows local to itself).
6366: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6367: owns that are to be zeroed. This saves a global synchronization in the implementation.
6369: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6370: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6371: @*/
6372: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6373: {
6374: PetscFunctionBegin;
6377: if (numRows) PetscAssertPointer(rows, 3);
6378: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6379: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6380: MatCheckPreallocated(mat, 1);
6382: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6383: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6384: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6385: PetscFunctionReturn(PETSC_SUCCESS);
6386: }
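/*
   Usage sketch (illustrative): zero locally owned rows listed in rows[] (length nlocal),
   keeping a unit diagonal; setting MAT_NO_OFF_PROC_ZERO_ROWS first avoids a global
   synchronization when every process lists only rows it owns. rows and nlocal are
   placeholders supplied by the application.

.vb
   PetscCall(MatSetOption(A, MAT_NO_OFF_PROC_ZERO_ROWS, PETSC_TRUE));
   PetscCall(MatZeroRows(A, nlocal, rows, 1.0, NULL, NULL));
.ve
*/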
6388: /*@
6389: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6390: of a set of rows of a matrix indicated by an `IS`
6392: Collective
6394: Input Parameters:
6395: + mat - the matrix
6396: . is - index set, `IS`, of rows to remove (if `NULL` then no row is removed)
6397: . diag - value put in all diagonals of eliminated rows
6398: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6399: - b - optional vector of right-hand side, that will be adjusted by provided solution
6401: Level: intermediate
6403: Note:
6404: See `MatZeroRows()` for details on how this routine operates.
6406: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6407: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `IS`
6408: @*/
6409: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6410: {
6411: PetscInt numRows = 0;
6412: const PetscInt *rows = NULL;
6414: PetscFunctionBegin;
6417: if (is) {
6419: PetscCall(ISGetLocalSize(is, &numRows));
6420: PetscCall(ISGetIndices(is, &rows));
6421: }
6422: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6423: if (is) PetscCall(ISRestoreIndices(is, &rows));
6424: PetscFunctionReturn(PETSC_SUCCESS);
6425: }
6427: /*@
6428: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6429: of a set of rows of a matrix indicated by a `MatStencil`. These rows must be local to the process.
6431: Collective
6433: Input Parameters:
6434: + mat - the matrix
6435: . numRows - the number of rows to remove
6436: . rows - the grid coordinates (and component number when dof > 1) for matrix rows indicated by an array of `MatStencil`
6437: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6438: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6439: - b - optional vector of right-hand side, that will be adjusted by provided solution
6441: Level: intermediate
6443: Notes:
6444: See `MatZeroRows()` for details on how this routine operates.
6446: The grid coordinates are across the entire grid, not just the local portion
6448: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6449: obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6450: etc., to obtain values that are obtained by wrapping the values from the left edge. This does not work for anything but the
6451: `DM_BOUNDARY_PERIODIC` boundary type.
6453: For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
6454: a single value per point) you can skip filling those indices.
6456: Fortran Note:
6457: `idxm` and `idxn` should be declared as
6458: .vb
6459: MatStencil idxm(4, m)
6460: .ve
6461: and the values inserted using
6462: .vb
6463: idxm(MatStencil_i, 1) = i
6464: idxm(MatStencil_j, 1) = j
6465: idxm(MatStencil_k, 1) = k
6466: idxm(MatStencil_c, 1) = c
6467: etc
6468: .ve
6470: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6471: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6472: @*/
6473: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6474: {
6475: PetscInt dim = mat->stencil.dim;
6476: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6477: PetscInt *dims = mat->stencil.dims + 1;
6478: PetscInt *starts = mat->stencil.starts;
6479: PetscInt *dxm = (PetscInt *)rows;
6480: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6482: PetscFunctionBegin;
6485: if (numRows) PetscAssertPointer(rows, 3);
6487: PetscCall(PetscMalloc1(numRows, &jdxm));
6488: for (i = 0; i < numRows; ++i) {
6489: /* Skip unused dimensions (they are ordered k, j, i, c) */
6490: for (j = 0; j < 3 - sdim; ++j) dxm++;
6491: /* Local index in X dir */
6492: tmp = *dxm++ - starts[0];
6493: /* Loop over remaining dimensions */
6494: for (j = 0; j < dim - 1; ++j) {
6495: /* If nonlocal, set index to be negative */
6496: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6497: /* Update local index */
6498: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6499: }
6500: /* Skip component slot if necessary */
6501: if (mat->stencil.noc) dxm++;
6502: /* Local row number */
6503: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6504: }
6505: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6506: PetscCall(PetscFree(jdxm));
6507: PetscFunctionReturn(PETSC_SUCCESS);
6508: }
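/*
   Usage sketch (illustrative): zero one locally owned row of a matrix obtained from a
   2d DMDA, addressed by its global grid coordinates i and j (placeholders).

.vb
   MatStencil row;

   row.i = i;   // global grid index in x
   row.j = j;   // global grid index in y
   row.k = 0;   // unused in 2d
   row.c = 0;   // component number when dof > 1
   PetscCall(MatZeroRowsStencil(A, 1, &row, 1.0, NULL, NULL));
.ve
*/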
6510: /*@
6511: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6512: of a set of rows and columns of a matrix.
6514: Collective
6516: Input Parameters:
6517: + mat - the matrix
6518: . numRows - the number of rows/columns to remove
6519: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6520: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6521: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6522: - b - optional vector of right-hand side, that will be adjusted by provided solution
6524: Level: intermediate
6526: Notes:
6527: See `MatZeroRowsColumns()` for details on how this routine operates.
6529: The grid coordinates are across the entire grid, not just the local portion
6531: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6532: obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6533: etc., to obtain values that are obtained by wrapping the values from the left edge. This does not work for anything but the
6534: `DM_BOUNDARY_PERIODIC` boundary type.
6536: For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
6537: a single value per point) you can skip filling those indices.
6539: Fortran Note:
6540: `idxm` and `idxn` should be declared as
6541: .vb
6542: MatStencil idxm(4, m)
6543: .ve
6544: and the values inserted using
6545: .vb
6546: idxm(MatStencil_i, 1) = i
6547: idxm(MatStencil_j, 1) = j
6548: idxm(MatStencil_k, 1) = k
6549: idxm(MatStencil_c, 1) = c
6550: etc
6551: .ve
6553: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6554: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6555: @*/
6556: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6557: {
6558: PetscInt dim = mat->stencil.dim;
6559: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6560: PetscInt *dims = mat->stencil.dims + 1;
6561: PetscInt *starts = mat->stencil.starts;
6562: PetscInt *dxm = (PetscInt *)rows;
6563: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6565: PetscFunctionBegin;
6568: if (numRows) PetscAssertPointer(rows, 3);
6570: PetscCall(PetscMalloc1(numRows, &jdxm));
6571: for (i = 0; i < numRows; ++i) {
6572: /* Skip unused dimensions (they are ordered k, j, i, c) */
6573: for (j = 0; j < 3 - sdim; ++j) dxm++;
6574: /* Local index in X dir */
6575: tmp = *dxm++ - starts[0];
6576: /* Loop over remaining dimensions */
6577: for (j = 0; j < dim - 1; ++j) {
6578: /* If nonlocal, set index to be negative */
6579: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6580: /* Update local index */
6581: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6582: }
6583: /* Skip component slot if necessary */
6584: if (mat->stencil.noc) dxm++;
6585: /* Local row number */
6586: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6587: }
6588: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6589: PetscCall(PetscFree(jdxm));
6590: PetscFunctionReturn(PETSC_SUCCESS);
6591: }
6593: /*@
6594: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6595: of a set of rows of a matrix; using local numbering of rows.
6597: Collective
6599: Input Parameters:
6600: + mat - the matrix
6601: . numRows - the number of rows to remove
6602: . rows - the local row indices
6603: . diag - value put in all diagonals of eliminated rows
6604: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6605: - b - optional vector of right-hand side, that will be adjusted by provided solution
6607: Level: intermediate
6609: Notes:
6610: Before calling `MatZeroRowsLocal()`, the user must first set the
6611: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6613: See `MatZeroRows()` for details on how this routine operates.
6615: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6616: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6617: @*/
6618: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6619: {
6620: PetscFunctionBegin;
6623: if (numRows) PetscAssertPointer(rows, 3);
6624: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6625: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6626: MatCheckPreallocated(mat, 1);
6628: if (mat->ops->zerorowslocal) {
6629: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6630: } else {
6631: IS is, newis;
6632: const PetscInt *newRows;
6634: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6635: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6636: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6637: PetscCall(ISGetIndices(newis, &newRows));
6638: PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6639: PetscCall(ISRestoreIndices(newis, &newRows));
6640: PetscCall(ISDestroy(&newis));
6641: PetscCall(ISDestroy(&is));
6642: }
6643: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6644: PetscFunctionReturn(PETSC_SUCCESS);
6645: }
6647: /*@
6648: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6649: of a set of rows of a matrix; using local numbering of rows.
6651: Collective
6653: Input Parameters:
6654: + mat - the matrix
6655: . is - index set of rows to remove
6656: . diag - value put in all diagonals of eliminated rows
6657: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6658: - b - optional vector of right-hand side, that will be adjusted by provided solution
6660: Level: intermediate
6662: Notes:
6663: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6664: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6666: See `MatZeroRows()` for details on how this routine operates.
6668: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6669: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6670: @*/
6671: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6672: {
6673: PetscInt numRows;
6674: const PetscInt *rows;
6676: PetscFunctionBegin;
6680: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6681: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6682: MatCheckPreallocated(mat, 1);
6684: PetscCall(ISGetLocalSize(is, &numRows));
6685: PetscCall(ISGetIndices(is, &rows));
6686: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6687: PetscCall(ISRestoreIndices(is, &rows));
6688: PetscFunctionReturn(PETSC_SUCCESS);
6689: }
6691: /*@
6692: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6693: of a set of rows and columns of a matrix; using local numbering of rows.
6695: Collective
6697: Input Parameters:
6698: + mat - the matrix
6699: . numRows - the number of rows to remove
6700: . rows - the local row indices
6701: . diag - value put in all diagonals of eliminated rows
6702: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6703: - b - optional vector of right-hand side, that will be adjusted by provided solution
6705: Level: intermediate
6707: Notes:
6708: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6709: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6711: See `MatZeroRowsColumns()` for details on how this routine operates.
6713: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6714: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6715: @*/
6716: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6717: {
6718: IS is, newis;
6719: const PetscInt *newRows;
6721: PetscFunctionBegin;
6724: if (numRows) PetscAssertPointer(rows, 3);
6725: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6726: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6727: MatCheckPreallocated(mat, 1);
6729: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6730: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6731: PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6732: PetscCall(ISGetIndices(newis, &newRows));
6733: PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6734: PetscCall(ISRestoreIndices(newis, &newRows));
6735: PetscCall(ISDestroy(&newis));
6736: PetscCall(ISDestroy(&is));
6737: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6738: PetscFunctionReturn(PETSC_SUCCESS);
6739: }
6741: /*@
6742: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6743: of a set of rows and columns of a matrix; using local numbering of rows.
6745: Collective
6747: Input Parameters:
6748: + mat - the matrix
6749: . is - index set of rows to remove
6750: . diag - value put in all diagonals of eliminated rows
6751: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6752: - b - optional vector of right-hand side, that will be adjusted by provided solution
6754: Level: intermediate
6756: Notes:
6757: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6758: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6760: See `MatZeroRowsColumns()` for details on how this routine operates.
6762: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6763: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6764: @*/
6765: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6766: {
6767: PetscInt numRows;
6768: const PetscInt *rows;
6770: PetscFunctionBegin;
6774: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6775: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6776: MatCheckPreallocated(mat, 1);
6778: PetscCall(ISGetLocalSize(is, &numRows));
6779: PetscCall(ISGetIndices(is, &rows));
6780: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6781: PetscCall(ISRestoreIndices(is, &rows));
6782: PetscFunctionReturn(PETSC_SUCCESS);
6783: }
6785: /*@
6786: MatGetSize - Returns the numbers of rows and columns in a matrix.
6788: Not Collective
6790: Input Parameter:
6791: . mat - the matrix
6793: Output Parameters:
6794: + m - the number of global rows
6795: - n - the number of global columns
6797: Level: beginner
6799: Note:
6800: Both output parameters can be `NULL` on input.
6802: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6803: @*/
6804: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6805: {
6806: PetscFunctionBegin;
6808: if (m) *m = mat->rmap->N;
6809: if (n) *n = mat->cmap->N;
6810: PetscFunctionReturn(PETSC_SUCCESS);
6811: }
6813: /*@
6814: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6815: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6817: Not Collective
6819: Input Parameter:
6820: . mat - the matrix
6822: Output Parameters:
6823: + m - the number of local rows, use `NULL` to not obtain this value
6824: - n - the number of local columns, use `NULL` to not obtain this value
6826: Level: beginner
6828: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6829: @*/
6830: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6831: {
6832: PetscFunctionBegin;
6834: if (m) PetscAssertPointer(m, 2);
6835: if (n) PetscAssertPointer(n, 3);
6836: if (m) *m = mat->rmap->n;
6837: if (n) *n = mat->cmap->n;
6838: PetscFunctionReturn(PETSC_SUCCESS);
6839: }
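/*
   Usage sketch (illustrative): query the global and local dimensions of a Mat A.

.vb
   PetscInt M, N, m, n;

   PetscCall(MatGetSize(A, &M, &N));        // global numbers of rows and columns
   PetscCall(MatGetLocalSize(A, &m, &n));   // local sizes of the vectors returned by MatCreateVecs()
.ve
*/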
6841: /*@
6842: MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with the locally owned rows of a
6843: vector that one multiplies this matrix by.
6845: Not Collective, unless matrix has not been allocated, then collective
6847: Input Parameter:
6848: . mat - the matrix
6850: Output Parameters:
6851: + m - the global index of the first local column, use `NULL` to not obtain this value
6852: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6854: Level: developer
6856: Notes:
6857: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6859: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6860: If `PETSC_DECIDE` was passed as the local size, then the range is determined by the default split computed with `PetscSplitOwnership()`.
6862: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6863: the local values in the matrix.
6865: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6866: Layouts](sec_matlayout) for details on matrix layouts.
6868: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6869: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6870: @*/
6871: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6872: {
6873: PetscFunctionBegin;
6876: if (m) PetscAssertPointer(m, 2);
6877: if (n) PetscAssertPointer(n, 3);
6878: MatCheckPreallocated(mat, 1);
6879: if (m) *m = mat->cmap->rstart;
6880: if (n) *n = mat->cmap->rend;
6881: PetscFunctionReturn(PETSC_SUCCESS);
6882: }
6884: /*@
6885: MatGetOwnershipRange - For matrices that own values by row, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6886: this MPI process.
6888: Not Collective
6890: Input Parameter:
6891: . mat - the matrix
6893: Output Parameters:
6894: + m - the global index of the first local row, use `NULL` to not obtain this value
6895: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6897: Level: beginner
6899: Notes:
6900: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6902: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6903: If `PETSC_DECIDE` was passed as the local size, then the range is determined by the default split computed with `PetscSplitOwnership()`.
6905: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6906: the local values in the matrix.
6908: The value returned in `n` is one more than the global index of the last locally owned row.
6910: For all matrices it returns the range of matrix rows associated with rows of a vector that
6911: would contain the result of a matrix vector product with this matrix. See [Matrix
6912: Layouts](sec_matlayout) for details on matrix layouts.
6914: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
6915: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6916: @*/
6917: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6918: {
6919: PetscFunctionBegin;
6922: if (m) PetscAssertPointer(m, 2);
6923: if (n) PetscAssertPointer(n, 3);
6924: MatCheckPreallocated(mat, 1);
6925: if (m) *m = mat->rmap->rstart;
6926: if (n) *n = mat->rmap->rend;
6927: PetscFunctionReturn(PETSC_SUCCESS);
6928: }
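/*
  A common usage sketch of MatGetOwnershipRange(): loop over the locally owned rows when inserting
  values. Assumes `A` is a parallel AIJ-like matrix whose sizes have been set; the inserted values
  are illustrative.

    PetscInt rstart, rend;

    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    for (PetscInt row = rstart; row < rend; row++) {
      PetscScalar v = 1.0;

      PetscCall(MatSetValue(A, row, row, v, INSERT_VALUES)); // unit diagonal on the owned rows
    }
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/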
6930: /*@C
6931: MatGetOwnershipRanges - For matrices that own values by row, excluding `MATELEMENTAL` and
6932: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6934: Not Collective, unless the matrix has not been allocated, in which case it is collective
6936: Input Parameter:
6937: . mat - the matrix
6939: Output Parameter:
6940: . ranges - the global index of the first row owned by each process, followed by one final entry equal to the total number of rows; the array has length `size` + 1,
6941: where `size` is the number of MPI processes used by `mat`
6943: Level: beginner
6945: Notes:
6946: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6948: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6949: If `PETSC_DECIDE` was passed as the local size, then the range is determined by the default split computed with `PetscSplitOwnership()`.
6951: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6952: the local values in the matrix.
6954: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6955: would contain the result of a matrix vector product with this matrix. See [Matrix
6956: Layouts](sec_matlayout) for details on matrix layouts.
6958: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6959: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
6960: `DMDAGetGhostCorners()`, `DM`
6961: @*/
6962: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
6963: {
6964: PetscFunctionBegin;
6967: MatCheckPreallocated(mat, 1);
6968: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6969: PetscFunctionReturn(PETSC_SUCCESS);
6970: }
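/*
  A small sketch using MatGetOwnershipRanges() to report the row range owned by every process
  (assumes `A` is a parallel matrix).

    const PetscInt *ranges;
    PetscMPIInt     size;

    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
    PetscCall(MatGetOwnershipRanges(A, &ranges));
    for (PetscMPIInt r = 0; r < size; r++) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "rank %d owns rows [%" PetscInt_FMT ", %" PetscInt_FMT ")\n", r, ranges[r], ranges[r + 1]));
*/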
6972: /*@C
6973: MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with the rows, owned by each process, of a vector
6974: that one multiplies this matrix by.
6976: Not Collective, unless the matrix has not been allocated, in which case it is collective
6978: Input Parameter:
6979: . mat - the matrix
6981: Output Parameter:
6982: . ranges - the global index of the first column owned by each process, followed by one final entry equal to the total number of columns; the array has length `size` + 1, where `size` is the number of MPI processes used by `mat`
6984: Level: beginner
6986: Notes:
6987: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6989: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6990: If `PETSC_DECIDE` was passed as the local size, then the range is determined by the default split computed with `PetscSplitOwnership()`.
6992: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6993: the local values in the matrix.
6995: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
6996: Layouts](sec_matlayout) for details on matrix layouts.
6998: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
6999: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
7000: `DMDAGetGhostCorners()`, `DM`
7001: @*/
7002: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
7003: {
7004: PetscFunctionBegin;
7007: MatCheckPreallocated(mat, 1);
7008: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
7009: PetscFunctionReturn(PETSC_SUCCESS);
7010: }
7012: /*@
7013: MatGetOwnershipIS - Get the row and column ownership of a matrix's values as index sets.
7015: Not Collective
7017: Input Parameter:
7018: . A - matrix
7020: Output Parameters:
7021: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
7022: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
7024: Level: intermediate
7026: Notes:
7027: You should call `ISDestroy()` on the returned index sets.
7029: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
7030: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
7031: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
7032: details on matrix layouts.
7034: .seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
7035: @*/
7036: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
7037: {
7038: PetscErrorCode (*f)(Mat, IS *, IS *);
7040: PetscFunctionBegin;
7043: MatCheckPreallocated(A, 1);
7044: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
7045: if (f) {
7046: PetscCall((*f)(A, rows, cols));
7047: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
7048: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
7049: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
7050: }
7051: PetscFunctionReturn(PETSC_SUCCESS);
7052: }
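/*
  A minimal sketch of MatGetOwnershipIS(); the caller must destroy the returned index sets.
  Assumes `A` is any preallocated Mat.

    IS rows, cols;

    PetscCall(MatGetOwnershipIS(A, &rows, &cols));
    // ... use the index sets, e.g. ISView(rows, PETSC_VIEWER_STDOUT_SELF) ...
    PetscCall(ISDestroy(&rows));
    PetscCall(ISDestroy(&cols));
*/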
7054: /*@
7055: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`.
7056: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
7057: to complete the factorization.
7059: Collective
7061: Input Parameters:
7062: + fact - the factorized matrix obtained with `MatGetFactor()`
7063: . mat - the matrix
7064: . row - row permutation
7065: . col - column permutation
7066: - info - structure containing
7067: .vb
7068: levels - number of levels of fill.
7069: expected fill - as ratio of original fill.
7070: diagonal fill - 1 or 0, indicating whether to force fill on the diagonal (improves robustness for matrices
7071: missing diagonal entries)
7072: .ve
7074: Level: developer
7076: Notes:
7077: See [Matrix Factorization](sec_matfactor) for additional information.
7079: Most users should employ the `KSP` interface for linear solvers
7080: instead of working directly with matrix algebra routines such as this.
7081: See, e.g., `KSPCreate()`.
7083: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
7085: Fortran Note:
7086: A valid (non-null) `info` argument must be provided
7088: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
7089: `MatGetOrdering()`, `MatFactorInfo`
7090: @*/
7091: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7092: {
7093: PetscFunctionBegin;
7098: PetscAssertPointer(info, 5);
7099: PetscAssertPointer(fact, 1);
7100: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7101: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7102: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7103: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7104: MatCheckPreallocated(mat, 2);
7106: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7107: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7108: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7109: PetscFunctionReturn(PETSC_SUCCESS);
7110: }
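/*
  A sketch of the full low-level ILU(k) workflow that MatILUFactorSymbolic() is part of. Most users
  should let KSP/PC drive this instead; `A`, `b`, and `x` are assumed to be an assembled matrix and
  conforming vectors.

    Mat           F;
    IS            rowperm, colperm;
    MatFactorInfo info;

    PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ILU, &F));
    PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm));
    PetscCall(MatFactorInfoInitialize(&info));
    info.levels = 1;   // ILU(1)
    info.fill   = 1.0; // expected fill ratio, must be at least 1.0
    PetscCall(MatILUFactorSymbolic(F, A, rowperm, colperm, &info));
    PetscCall(MatLUFactorNumeric(F, A, &info));
    PetscCall(MatSolve(F, b, x));
    PetscCall(ISDestroy(&rowperm));
    PetscCall(ISDestroy(&colperm));
    PetscCall(MatDestroy(&F));
*/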
7112: /*@
7113: MatICCFactorSymbolic - Performs symbolic incomplete
7114: Cholesky factorization for a symmetric matrix. Use
7115: `MatCholeskyFactorNumeric()` to complete the factorization.
7117: Collective
7119: Input Parameters:
7120: + fact - the factorized matrix obtained with `MatGetFactor()`
7121: . mat - the matrix to be factored
7122: . perm - row and column permutation
7123: - info - structure containing
7124: .vb
7125: levels - number of levels of fill.
7126: expected fill - as ratio of original fill.
7127: .ve
7129: Level: developer
7131: Notes:
7132: Most users should employ the `KSP` interface for linear solvers
7133: instead of working directly with matrix algebra routines such as this.
7134: See, e.g., `KSPCreate()`.
7136: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7138: Fortran Note:
7139: A valid (non-null) `info` argument must be provided
7141: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7142: @*/
7143: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7144: {
7145: PetscFunctionBegin;
7149: PetscAssertPointer(info, 4);
7150: PetscAssertPointer(fact, 1);
7151: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7152: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7153: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7154: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7155: MatCheckPreallocated(mat, 2);
7157: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7158: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7159: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7160: PetscFunctionReturn(PETSC_SUCCESS);
7161: }
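/*
  The analogous ICC(k) workflow sketch for a symmetric matrix (normally driven through PC);
  `A`, `b`, and `x` are assumed to be an assembled symmetric matrix and conforming vectors.

    Mat           F;
    IS            perm, cperm;
    MatFactorInfo info;

    PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ICC, &F));
    PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &perm, &cperm));
    PetscCall(MatFactorInfoInitialize(&info));
    info.levels = 0;   // ICC(0)
    info.fill   = 1.0;
    PetscCall(MatICCFactorSymbolic(F, A, perm, &info));
    PetscCall(MatCholeskyFactorNumeric(F, A, &info));
    PetscCall(MatSolve(F, b, x));
    PetscCall(ISDestroy(&perm));
    PetscCall(ISDestroy(&cperm));
    PetscCall(MatDestroy(&F));
*/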
7163: /*@C
7164: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7165: points to an array of valid matrices, they may be reused to store the new
7166: submatrices.
7168: Collective
7170: Input Parameters:
7171: + mat - the matrix
7172: . n - the number of submatrices to be extracted (on this processor, may be zero)
7173: . irow - index set of rows to extract
7174: . icol - index set of columns to extract
7175: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7177: Output Parameter:
7178: . submat - the array of submatrices
7180: Level: advanced
7182: Notes:
7183: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7184: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7185: to extract a parallel submatrix.
7187: Some matrix types place restrictions on the row and column
7188: indices, such as that they be sorted or that they be equal to each other.
7190: The index sets may not have duplicate entries.
7192: When extracting submatrices from a parallel matrix, each processor can
7193: form a different submatrix by setting the rows and columns of its
7194: individual index sets according to the local submatrix desired.
7196: When finished using the submatrices, the user should destroy
7197: them with `MatDestroySubMatrices()`.
7199: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7200: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7202: This routine creates the matrices in submat; you should NOT create them before
7203: calling it. It also allocates the array of matrix pointers submat.
7205: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7206: request one row/column in a block, they must request all rows/columns that are in
7207: that block. For example, if the block size is 2 you cannot request just row 0 and
7208: column 0.
7210: Fortran Note:
7211: .vb
7212: Mat, pointer :: submat(:)
7213: .ve
7215: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7216: @*/
7217: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7218: {
7219: PetscInt i;
7220: PetscBool eq;
7222: PetscFunctionBegin;
7225: if (n) {
7226: PetscAssertPointer(irow, 3);
7228: PetscAssertPointer(icol, 4);
7230: }
7231: PetscAssertPointer(submat, 6);
7232: if (n && scall == MAT_REUSE_MATRIX) {
7233: PetscAssertPointer(*submat, 6);
7235: }
7236: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7237: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7238: MatCheckPreallocated(mat, 1);
7239: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7240: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7241: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7242: for (i = 0; i < n; i++) {
7243: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7244: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7245: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7246: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7247: if (mat->boundtocpu && mat->bindingpropagates) {
7248: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7249: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7250: }
7251: #endif
7252: }
7253: PetscFunctionReturn(PETSC_SUCCESS);
7254: }
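/*
  A sketch extracting one sequential submatrix per process with MatCreateSubMatrices(): here the
  locally owned rows against the first 10 global columns (the column count is illustrative; `A` is
  an assembled parallel matrix).

    IS       isrow, iscol;
    Mat     *submats;
    PetscInt rstart, rend;

    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &isrow));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, 10, 0, 1, &iscol));
    PetscCall(MatCreateSubMatrices(A, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &submats));
    // ... work with the sequential matrix submats[0] ...
    PetscCall(MatDestroySubMatrices(1, &submats));
    PetscCall(ISDestroy(&isrow));
    PetscCall(ISDestroy(&iscol));
*/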
7256: /*@C
7257: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of `mat` (by pairs of `IS` that may live on subcomms).
7259: Collective
7261: Input Parameters:
7262: + mat - the matrix
7263: . n - the number of submatrices to be extracted
7264: . irow - index set of rows to extract
7265: . icol - index set of columns to extract
7266: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7268: Output Parameter:
7269: . submat - the array of submatrices
7271: Level: advanced
7273: Note:
7274: This is used by `PCGASM`
7276: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7277: @*/
7278: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7279: {
7280: PetscInt i;
7281: PetscBool eq;
7283: PetscFunctionBegin;
7286: if (n) {
7287: PetscAssertPointer(irow, 3);
7289: PetscAssertPointer(icol, 4);
7291: }
7292: PetscAssertPointer(submat, 6);
7293: if (n && scall == MAT_REUSE_MATRIX) {
7294: PetscAssertPointer(*submat, 6);
7296: }
7297: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7298: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7299: MatCheckPreallocated(mat, 1);
7301: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7302: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7303: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7304: for (i = 0; i < n; i++) {
7305: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7306: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7307: }
7308: PetscFunctionReturn(PETSC_SUCCESS);
7309: }
7311: /*@C
7312: MatDestroyMatrices - Destroys an array of matrices
7314: Collective
7316: Input Parameters:
7317: + n - the number of local matrices
7318: - mat - the matrices (this is a pointer to the array of matrices)
7320: Level: advanced
7322: Notes:
7323: Frees not only the matrices, but also the array that contains the matrices
7325: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7327: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7328: @*/
7329: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7330: {
7331: PetscInt i;
7333: PetscFunctionBegin;
7334: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7335: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7336: PetscAssertPointer(mat, 2);
7338: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7340: /* memory is allocated even if n = 0 */
7341: PetscCall(PetscFree(*mat));
7342: PetscFunctionReturn(PETSC_SUCCESS);
7343: }
7345: /*@C
7346: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7348: Collective
7350: Input Parameters:
7351: + n - the number of local matrices
7352: - mat - the matrices (this is a pointer to the array of matrices, to match the calling sequence of `MatCreateSubMatrices()`)
7354: Level: advanced
7356: Note:
7357: Frees not only the matrices, but also the array that contains the matrices
7359: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7360: @*/
7361: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7362: {
7363: Mat mat0;
7365: PetscFunctionBegin;
7366: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7367: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7368: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7369: PetscAssertPointer(mat, 2);
7371: mat0 = (*mat)[0];
7372: if (mat0 && mat0->ops->destroysubmatrices) {
7373: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7374: } else {
7375: PetscCall(MatDestroyMatrices(n, mat));
7376: }
7377: PetscFunctionReturn(PETSC_SUCCESS);
7378: }
7380: /*@
7381: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7383: Collective
7385: Input Parameter:
7386: . mat - the matrix
7388: Output Parameter:
7389: . matstruct - the sequential matrix with the nonzero structure of `mat`
7391: Level: developer
7393: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7394: @*/
7395: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7396: {
7397: PetscFunctionBegin;
7399: PetscAssertPointer(matstruct, 2);
7402: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7403: MatCheckPreallocated(mat, 1);
7405: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7406: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7407: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7408: PetscFunctionReturn(PETSC_SUCCESS);
7409: }
7411: /*@C
7412: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7414: Collective
7416: Input Parameter:
7417: . mat - the matrix
7419: Level: advanced
7421: Note:
7422: This is not needed; one can simply call `MatDestroy()`
7424: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7425: @*/
7426: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7427: {
7428: PetscFunctionBegin;
7429: PetscAssertPointer(mat, 1);
7430: PetscCall(MatDestroy(mat));
7431: PetscFunctionReturn(PETSC_SUCCESS);
7432: }
7434: /*@
7435: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7436: replaces the index sets by larger ones that represent submatrices with
7437: additional overlap.
7439: Collective
7441: Input Parameters:
7442: + mat - the matrix
7443: . n - the number of index sets
7444: . is - the array of index sets (these index sets will be changed during the call)
7445: - ov - the additional overlap requested
7447: Options Database Key:
7448: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7450: Level: developer
7452: Note:
7453: The computed overlap preserves the matrix block sizes when the blocks are square.
7454: That is: if a matrix nonzero for a given block would increase the overlap, all columns associated with
7455: that block are included in the overlap, regardless of whether each specific column would increase the overlap.
7457: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7458: @*/
7459: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7460: {
7461: PetscInt i, bs, cbs;
7463: PetscFunctionBegin;
7467: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7468: if (n) {
7469: PetscAssertPointer(is, 3);
7471: }
7472: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7473: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7474: MatCheckPreallocated(mat, 1);
7476: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7477: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7478: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7479: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7480: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7481: if (bs == cbs) {
7482: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7483: }
7484: PetscFunctionReturn(PETSC_SUCCESS);
7485: }
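/*
  A sketch of the additive-Schwarz-style use of MatIncreaseOverlap(): start from the locally owned
  rows and grow the subdomain by one level of overlap before extracting it (`A` is an assembled
  parallel matrix; the overlap of 1 is illustrative).

    IS       is;
    Mat     *subs;
    PetscInt rstart, rend;

    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &is));
    PetscCall(MatIncreaseOverlap(A, 1, &is, 1)); // the index set is replaced by the enlarged one
    PetscCall(MatCreateSubMatrices(A, 1, &is, &is, MAT_INITIAL_MATRIX, &subs));
    // ... local subdomain solve with subs[0] ...
    PetscCall(MatDestroySubMatrices(1, &subs));
    PetscCall(ISDestroy(&is));
*/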
7487: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7489: /*@
7490: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7491: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7492: additional overlap.
7494: Collective
7496: Input Parameters:
7497: + mat - the matrix
7498: . n - the number of index sets
7499: . is - the array of index sets (these index sets will be changed during the call)
7500: - ov - the additional overlap requested
7502: Options Database Key:
7503: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7505: Level: developer
7507: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7508: @*/
7509: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7510: {
7511: PetscInt i;
7513: PetscFunctionBegin;
7516: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7517: if (n) {
7518: PetscAssertPointer(is, 3);
7520: }
7521: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7522: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7523: MatCheckPreallocated(mat, 1);
7524: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7525: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7526: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7527: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7528: PetscFunctionReturn(PETSC_SUCCESS);
7529: }
7531: /*@
7532: MatGetBlockSize - Returns the matrix block size.
7534: Not Collective
7536: Input Parameter:
7537: . mat - the matrix
7539: Output Parameter:
7540: . bs - block size
7542: Level: intermediate
7544: Notes:
7545: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7547: If the block size has not been set yet this routine returns 1.
7549: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7550: @*/
7551: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7552: {
7553: PetscFunctionBegin;
7555: PetscAssertPointer(bs, 2);
7556: *bs = mat->rmap->bs;
7557: PetscFunctionReturn(PETSC_SUCCESS);
7558: }
7560: /*@
7561: MatGetBlockSizes - Returns the matrix block row and column sizes.
7563: Not Collective
7565: Input Parameter:
7566: . mat - the matrix
7568: Output Parameters:
7569: + rbs - row block size
7570: - cbs - column block size
7572: Level: intermediate
7574: Notes:
7575: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7576: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7578: If a block size has not been set yet this routine returns 1.
7580: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7581: @*/
7582: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7583: {
7584: PetscFunctionBegin;
7586: if (rbs) PetscAssertPointer(rbs, 2);
7587: if (cbs) PetscAssertPointer(cbs, 3);
7588: if (rbs) *rbs = mat->rmap->bs;
7589: if (cbs) *cbs = mat->cmap->bs;
7590: PetscFunctionReturn(PETSC_SUCCESS);
7591: }
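/*
  A minimal query sketch for the block sizes of a matrix `A`.

    PetscInt bs, rbs, cbs;

    PetscCall(MatGetBlockSize(A, &bs));         // row block size (the square storage block for BAIJ/SBAIJ)
    PetscCall(MatGetBlockSizes(A, &rbs, &cbs)); // row and column block sizes
*/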
7593: /*@
7594: MatSetBlockSize - Sets the matrix block size.
7596: Logically Collective
7598: Input Parameters:
7599: + mat - the matrix
7600: - bs - block size
7602: Level: intermediate
7604: Notes:
7605: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7606: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or the block size will default to 1), and the block size cannot be changed later.
7608: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7609: is compatible with the matrix local sizes.
7611: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7612: @*/
7613: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7614: {
7615: PetscFunctionBegin;
7618: PetscCall(MatSetBlockSizes(mat, bs, bs));
7619: PetscFunctionReturn(PETSC_SUCCESS);
7620: }
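/*
  A sketch of setting the block size before preallocation (the sizes are illustrative; the local and
  global sizes must be divisible by the block size).

    Mat A;

    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 12, 12));
    PetscCall(MatSetType(A, MATAIJ));
    PetscCall(MatSetBlockSize(A, 3)); // must come before MatSetUp()/preallocation
    PetscCall(MatSetUp(A));
*/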
7622: typedef struct {
7623: PetscInt n;
7624: IS *is;
7625: Mat *mat;
7626: PetscObjectState nonzerostate;
7627: Mat C;
7628: } EnvelopeData;
7630: static PetscErrorCode EnvelopeDataDestroy(void **ptr)
7631: {
7632: EnvelopeData *edata = (EnvelopeData *)*ptr;
7634: PetscFunctionBegin;
7635: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7636: PetscCall(PetscFree(edata->is));
7637: PetscCall(PetscFree(edata));
7638: PetscFunctionReturn(PETSC_SUCCESS);
7639: }
7641: /*@
7642: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal, this computes and stores
7643: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7645: Collective
7647: Input Parameter:
7648: . mat - the matrix
7650: Level: intermediate
7652: Notes:
7653: There can be zeros within the blocks
7655: The blocks can overlap between processes, including lying across more than two processes
7657: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7658: @*/
7659: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7660: {
7661: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7662: PetscInt *diag, *odiag, sc;
7663: VecScatter scatter;
7664: PetscScalar *seqv;
7665: const PetscScalar *parv;
7666: const PetscInt *ia, *ja;
7667: PetscBool set, flag, done;
7668: Mat AA = mat, A;
7669: MPI_Comm comm;
7670: PetscMPIInt rank, size, tag;
7671: MPI_Status status;
7672: PetscContainer container;
7673: EnvelopeData *edata;
7674: Vec seq, par;
7675: IS isglobal;
7677: PetscFunctionBegin;
7679: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7680: if (!set || !flag) {
7681: /* TODO: only needs nonzero structure of transpose */
7682: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7683: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7684: }
7685: PetscCall(MatAIJGetLocalMat(AA, &A));
7686: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7687: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7689: PetscCall(MatGetLocalSize(mat, &n, NULL));
7690: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7691: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7692: PetscCallMPI(MPI_Comm_size(comm, &size));
7693: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7695: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7697: if (rank > 0) {
7698: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7699: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7700: }
7701: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7702: for (i = 0; i < n; i++) {
7703: env = PetscMax(env, ja[ia[i + 1] - 1]);
7704: II = rstart + i;
7705: if (env == II) {
7706: starts[lblocks] = tbs;
7707: sizes[lblocks++] = 1 + II - tbs;
7708: tbs = 1 + II;
7709: }
7710: }
7711: if (rank < size - 1) {
7712: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7713: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7714: }
7716: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7717: if (!set || !flag) PetscCall(MatDestroy(&AA));
7718: PetscCall(MatDestroy(&A));
7720: PetscCall(PetscNew(&edata));
7721: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7722: edata->n = lblocks;
7723: /* create IS needed for extracting blocks from the original matrix */
7724: PetscCall(PetscMalloc1(lblocks, &edata->is));
7725: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7727: /* Create the resulting inverse matrix nonzero structure with preallocation information */
7728: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7729: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7730: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7731: PetscCall(MatSetType(edata->C, MATAIJ));
7733: /* Communicate the start and end of each row, from each block to the correct rank */
7734: /* TODO: Use PetscSF instead of VecScatter */
7735: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7736: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7737: PetscCall(VecGetArrayWrite(seq, &seqv));
7738: for (PetscInt i = 0; i < lblocks; i++) {
7739: for (PetscInt j = 0; j < sizes[i]; j++) {
7740: seqv[cnt] = starts[i];
7741: seqv[cnt + 1] = starts[i] + sizes[i];
7742: cnt += 2;
7743: }
7744: }
7745: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7746: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7747: sc -= cnt;
7748: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7749: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7750: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7751: PetscCall(ISDestroy(&isglobal));
7752: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7753: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7754: PetscCall(VecScatterDestroy(&scatter));
7755: PetscCall(VecDestroy(&seq));
7756: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7757: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7758: PetscCall(VecGetArrayRead(par, &parv));
7759: cnt = 0;
7760: PetscCall(MatGetSize(mat, NULL, &n));
7761: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7762: PetscInt start, end, d = 0, od = 0;
7764: start = (PetscInt)PetscRealPart(parv[cnt]);
7765: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7766: cnt += 2;
7768: if (start < cstart) {
7769: od += cstart - start + n - cend;
7770: d += cend - cstart;
7771: } else if (start < cend) {
7772: od += n - cend;
7773: d += cend - start;
7774: } else od += n - start;
7775: if (end <= cstart) {
7776: od -= cstart - end + n - cend;
7777: d -= cend - cstart;
7778: } else if (end < cend) {
7779: od -= n - cend;
7780: d -= cend - end;
7781: } else od -= n - end;
7783: odiag[i] = od;
7784: diag[i] = d;
7785: }
7786: PetscCall(VecRestoreArrayRead(par, &parv));
7787: PetscCall(VecDestroy(&par));
7788: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7789: PetscCall(PetscFree2(diag, odiag));
7790: PetscCall(PetscFree2(sizes, starts));
7792: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7793: PetscCall(PetscContainerSetPointer(container, edata));
7794: PetscCall(PetscContainerSetCtxDestroy(container, EnvelopeDataDestroy));
7795: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7796: PetscCall(PetscObjectDereference((PetscObject)container));
7797: PetscFunctionReturn(PETSC_SUCCESS);
7798: }
7800: /*@
7801: MatInvertVariableBlockEnvelope - Sets the matrix `C` to be the inverted block diagonal of the matrix `A`
7803: Collective
7805: Input Parameters:
7806: + A - the matrix
7807: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7809: Output Parameter:
7810: . C - matrix with inverted block diagonal of `A`
7812: Level: advanced
7814: Note:
7815: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7817: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7818: @*/
7819: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7820: {
7821: PetscContainer container;
7822: EnvelopeData *edata;
7823: PetscObjectState nonzerostate;
7825: PetscFunctionBegin;
7826: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7827: if (!container) {
7828: PetscCall(MatComputeVariableBlockEnvelope(A));
7829: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7830: }
7831: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7832: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7833: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7834: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7836: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7837: *C = edata->C;
7839: for (PetscInt i = 0; i < edata->n; i++) {
7840: Mat D;
7841: PetscScalar *dvalues;
7843: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7844: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7845: PetscCall(MatSeqDenseInvert(D));
7846: PetscCall(MatDenseGetArray(D, &dvalues));
7847: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7848: PetscCall(MatDestroy(&D));
7849: }
7850: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7851: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7852: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7853: PetscFunctionReturn(PETSC_SUCCESS);
7854: }
7856: /*@
7857: MatSetVariableBlockSizes - Sets the sizes of the diagonal point-blocks of the matrix, which need not all be the same
7859: Not Collective
7861: Input Parameters:
7862: + mat - the matrix
7863: . nblocks - the number of blocks on this process; each block can only exist on a single process
7864: - bsizes - the block sizes
7866: Level: intermediate
7868: Notes:
7869: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7871: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
7873: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7874: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7875: @*/
7876: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
7877: {
7878: PetscInt ncnt = 0, nlocal;
7880: PetscFunctionBegin;
7882: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7883: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
7884: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
7885: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7886: PetscCall(PetscFree(mat->bsizes));
7887: mat->nblocks = nblocks;
7888: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7889: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7890: PetscFunctionReturn(PETSC_SUCCESS);
7891: }
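/*
  A sketch of MatSetVariableBlockSizes() as used with PCVPBJACOBI; the block sizes are illustrative
  and must sum to the number of local rows of `A`.

    const PetscInt bsizes[] = {2, 3, 1}; // three local point-blocks; 2 + 3 + 1 equals the local row count

    PetscCall(MatSetVariableBlockSizes(A, 3, bsizes));
*/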
7893: /*@C
7894: MatGetVariableBlockSizes - Gets the sizes of the diagonal point-blocks of the matrix, which need not all be the same
7896: Not Collective; No Fortran Support
7898: Input Parameter:
7899: . mat - the matrix
7901: Output Parameters:
7902: + nblocks - the number of blocks on this process
7903: - bsizes - the block sizes
7905: Level: intermediate
7907: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7908: @*/
7909: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
7910: {
7911: PetscFunctionBegin;
7913: if (nblocks) *nblocks = mat->nblocks;
7914: if (bsizes) *bsizes = mat->bsizes;
7915: PetscFunctionReturn(PETSC_SUCCESS);
7916: }
7918: /*
7919: MatSelectVariableBlockSizes - When creating a submatrix, pass on the variable block sizes
7921: Not Collective
7923: Input Parameters:
7924: + subA - the submatrix
7925: . A - the original matrix
7926: - isrow - the `IS` of selected rows for the submatrix
7928: Level: developer
7930: .seealso: [](ch_matrices), `Mat`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7931: */
7932: static PetscErrorCode MatSelectVariableBlockSizes(Mat subA, Mat A, IS isrow)
7933: {
7934: const PetscInt *rows;
7935: PetscInt n, rStart, rEnd, Nb = 0;
7937: PetscFunctionBegin;
7938: if (!A->bsizes) PetscFunctionReturn(PETSC_SUCCESS);
7939: // The IS contains global row numbers, we cannot preserve blocks if it contains off-process entries
7940: PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
7941: PetscCall(ISGetIndices(isrow, &rows));
7942: PetscCall(ISGetLocalSize(isrow, &n));
7943: for (PetscInt i = 0; i < n; ++i) {
7944: if (rows[i] < rStart || rows[i] >= rEnd) {
7945: PetscCall(ISRestoreIndices(isrow, &rows));
7946: PetscFunctionReturn(PETSC_SUCCESS);
7947: }
7948: }
7949: for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) {
7950: PetscBool occupied = PETSC_FALSE;
7952: for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
7953: const PetscInt row = gr + br;
7955: if (i == n) break;
7956: if (rows[i] == row) {
7957: occupied = PETSC_TRUE;
7958: ++i;
7959: }
7960: while (i < n && rows[i] < row) ++i;
7961: }
7962: gr += A->bsizes[b];
7963: if (occupied) ++Nb;
7964: }
7965: subA->nblocks = Nb;
7966: PetscCall(PetscFree(subA->bsizes));
7967: PetscCall(PetscMalloc1(subA->nblocks, &subA->bsizes));
7968: PetscInt sb = 0;
7969: for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) {
7970: if (sb < subA->nblocks) subA->bsizes[sb] = 0;
7971: for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
7972: const PetscInt row = gr + br;
7974: if (i == n) break;
7975: if (rows[i] == row) {
7976: ++subA->bsizes[sb];
7977: ++i;
7978: }
7979: while (i < n && rows[i] < row) ++i;
7980: }
7981: gr += A->bsizes[b];
7982: if (sb < subA->nblocks && subA->bsizes[sb]) ++sb;
7983: }
7984: PetscCheck(sb == subA->nblocks, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of blocks %" PetscInt_FMT " != %" PetscInt_FMT, sb, subA->nblocks);
7985: PetscInt nlocal, ncnt = 0;
7986: PetscCall(MatGetLocalSize(subA, &nlocal, NULL));
7987: PetscCheck(subA->nblocks >= 0 && subA->nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", subA->nblocks, nlocal);
7988: for (PetscInt i = 0; i < subA->nblocks; i++) ncnt += subA->bsizes[i];
7989: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7990: PetscCall(ISRestoreIndices(isrow, &rows));
7991: PetscFunctionReturn(PETSC_SUCCESS);
7992: }
7994: /*@
7995: MatSetBlockSizes - Sets the matrix block row and column sizes.
7997: Logically Collective
7999: Input Parameters:
8000: + mat - the matrix
8001: . rbs - row block size
8002: - cbs - column block size
8004: Level: intermediate
8006: Notes:
8007: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
8008: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
8009: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or the block sizes will default to 1), and the block sizes cannot be changed later.
8011: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
8012: are compatible with the matrix local sizes.
8014: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
8016: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
8017: @*/
8018: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
8019: {
8020: PetscFunctionBegin;
8024: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
8025: if (mat->rmap->refcnt) {
8026: ISLocalToGlobalMapping l2g = NULL;
8027: PetscLayout nmap = NULL;
8029: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
8030: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
8031: PetscCall(PetscLayoutDestroy(&mat->rmap));
8032: mat->rmap = nmap;
8033: mat->rmap->mapping = l2g;
8034: }
8035: if (mat->cmap->refcnt) {
8036: ISLocalToGlobalMapping l2g = NULL;
8037: PetscLayout nmap = NULL;
8039: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
8040: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
8041: PetscCall(PetscLayoutDestroy(&mat->cmap));
8042: mat->cmap = nmap;
8043: mat->cmap->mapping = l2g;
8044: }
8045: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
8046: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
8047: PetscFunctionReturn(PETSC_SUCCESS);
8048: }
8050: /*@
8051: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
8053: Logically Collective
8055: Input Parameters:
8056: + mat - the matrix
8057: . fromRow - matrix from which to copy row block size
8058: - fromCol - matrix from which to copy column block size (can be same as fromRow)
8060: Level: developer
8062: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
8063: @*/
8064: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
8065: {
8066: PetscFunctionBegin;
8070: PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
8071: PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
8072: PetscFunctionReturn(PETSC_SUCCESS);
8073: }
8075: /*@
8076: MatResidual - Default routine to calculate the residual r = b - Ax
8078: Collective
8080: Input Parameters:
8081: + mat - the matrix
8082: . b - the right-hand-side
8083: - x - the approximate solution
8085: Output Parameter:
8086: . r - location to store the residual
8088: Level: developer
8090: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8091: @*/
8092: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8093: {
8094: PetscFunctionBegin;
8100: MatCheckPreallocated(mat, 1);
8101: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8102: if (!mat->ops->residual) {
8103: PetscCall(MatMult(mat, x, r));
8104: PetscCall(VecAYPX(r, -1.0, b));
8105: } else {
8106: PetscUseTypeMethod(mat, residual, b, x, r);
8107: }
8108: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8109: PetscFunctionReturn(PETSC_SUCCESS);
8110: }
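/*
  A minimal sketch of MatResidual(); `A` is an assembled matrix, and the vectors are created with
  conforming layouts via MatCreateVecs().

    Vec b, x, r;

    PetscCall(MatCreateVecs(A, &x, &b)); // x conforms to the columns, b to the rows
    PetscCall(VecDuplicate(b, &r));
    // ... fill b and x ...
    PetscCall(MatResidual(A, b, x, r)); // r = b - A*x
*/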
8112: /*@C
8113: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8115: Collective
8117: Input Parameters:
8118: + mat - the matrix
8119: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8120: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8121: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8122: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8123: always used.
8125: Output Parameters:
8126: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8127: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8128: . ja - the column indices, use `NULL` if not needed
8129: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8130: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8132: Level: developer
8134: Notes:
8135: You CANNOT change any of the ia[] or ja[] values.
8137: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
8139: Fortran Notes:
8140: Use
8141: .vb
8142: PetscInt, pointer :: ia(:),ja(:)
8143: call MatGetRowIJ(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8144: ! Access the ith and jth entries via ia(i) and ja(j)
8145: .ve
8147: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8148: @*/
8149: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8150: {
8151: PetscFunctionBegin;
8154: if (n) PetscAssertPointer(n, 5);
8155: if (ia) PetscAssertPointer(ia, 6);
8156: if (ja) PetscAssertPointer(ja, 7);
8157: if (done) PetscAssertPointer(done, 8);
8158: MatCheckPreallocated(mat, 1);
8159: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8160: else {
8161: if (done) *done = PETSC_TRUE;
8162: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8163: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8164: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8165: }
8166: PetscFunctionReturn(PETSC_SUCCESS);
8167: }
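/*
  A sketch walking the compressed-row structure of a sequential AIJ matrix `A` with MatGetRowIJ()
  (0-based indices, not symmetrized, not inode-compressed); always pair it with MatRestoreRowIJ().

    PetscInt        nrows;
    const PetscInt *ia, *ja;
    PetscBool       done;

    PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done));
    if (done) {
      for (PetscInt i = 0; i < nrows; i++) {
        for (PetscInt k = ia[i]; k < ia[i + 1]; k++) {
          const PetscInt col = ja[k]; // column index of a stored entry in row i
          (void)col;
        }
      }
    }
    PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done));
*/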
8169: /*@C
8170: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8172: Collective
8174: Input Parameters:
8175: + mat - the matrix
8176: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8177: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8178: symmetrized
8179: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8180: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8181: always used.
8182: . n - number of columns in the (possibly compressed) matrix
8183: . ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
8184: - ja - the row indices
8186: Output Parameter:
8187: . done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8189: Level: developer
8191: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8192: @*/
8193: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8194: {
8195: PetscFunctionBegin;
8198: PetscAssertPointer(n, 5);
8199: if (ia) PetscAssertPointer(ia, 6);
8200: if (ja) PetscAssertPointer(ja, 7);
8201: PetscAssertPointer(done, 8);
8202: MatCheckPreallocated(mat, 1);
8203: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8204: else {
8205: *done = PETSC_TRUE;
8206: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8207: }
8208: PetscFunctionReturn(PETSC_SUCCESS);
8209: }
8211: /*@C
8212: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8214: Collective
8216: Input Parameters:
8217: + mat - the matrix
8218: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8219: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8220: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8221: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8222: always used.
8223: . n - size of (possibly compressed) matrix
8224: . ia - the row pointers
8225: - ja - the column indices
8227: Output Parameter:
8228: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8230: Level: developer
8232: Note:
8233: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8234: use of the arrays after they have been restored. If you pass `NULL`, it will
8235: not zero the pointers. Use of `ia` or `ja` after `MatRestoreRowIJ()` is invalid.
8237: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8238: @*/
8239: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8240: {
8241: PetscFunctionBegin;
8244: if (ia) PetscAssertPointer(ia, 6);
8245: if (ja) PetscAssertPointer(ja, 7);
8246: if (done) PetscAssertPointer(done, 8);
8247: MatCheckPreallocated(mat, 1);
8249: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8250: else {
8251: if (done) *done = PETSC_TRUE;
8252: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8253: if (n) *n = 0;
8254: if (ia) *ia = NULL;
8255: if (ja) *ja = NULL;
8256: }
8257: PetscFunctionReturn(PETSC_SUCCESS);
8258: }
8260: /*@C
8261: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8263: Collective
8265: Input Parameters:
8266: + mat - the matrix
8267: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8268: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8269: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8270: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8271: always used.
8273: Output Parameters:
8274: + n - size of (possibly compressed) matrix
8275: . ia - the column pointers
8276: . ja - the row indices
8277: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8279: Level: developer
8281: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8282: @*/
8283: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8284: {
8285: PetscFunctionBegin;
8288: if (ia) PetscAssertPointer(ia, 6);
8289: if (ja) PetscAssertPointer(ja, 7);
8290: PetscAssertPointer(done, 8);
8291: MatCheckPreallocated(mat, 1);
8293: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8294: else {
8295: *done = PETSC_TRUE;
8296: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8297: if (n) *n = 0;
8298: if (ia) *ia = NULL;
8299: if (ja) *ja = NULL;
8300: }
8301: PetscFunctionReturn(PETSC_SUCCESS);
8302: }
8304: /*@
8305: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8306: `MatGetColumnIJ()`.
8308: Collective
8310: Input Parameters:
8311: + mat - the matrix
8312: . ncolors - maximum color value
8313: . n - number of entries in colorarray
8314: - colorarray - array indicating color for each column
8316: Output Parameter:
8317: . iscoloring - coloring generated using colorarray information
8319: Level: developer
8321: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8322: @*/
8323: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8324: {
8325: PetscFunctionBegin;
8328: PetscAssertPointer(colorarray, 4);
8329: PetscAssertPointer(iscoloring, 5);
8330: MatCheckPreallocated(mat, 1);
8332: if (!mat->ops->coloringpatch) {
8333: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8334: } else {
8335: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8336: }
8337: PetscFunctionReturn(PETSC_SUCCESS);
8338: }
8340: /*@
8341: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8343: Logically Collective
8345: Input Parameter:
8346: . mat - the factored matrix to be reset
8348: Level: developer
8350: Notes:
8351: This routine should be used only with factored matrices formed by in-place
8352: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8353: format). This option can save memory, for example, when solving nonlinear
8354: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8355: ILU(0) preconditioner.
8357: One can specify in-place ILU(0) factorization by calling
8358: .vb
8359:      PCSetType(pc,PCILU);
8360:      PCFactorSetUseInPlace(pc,PETSC_TRUE);
8361: .ve
8362: or by using the options -pc_type ilu -pc_factor_in_place
8364: In-place factorization ILU(0) can also be used as a local
8365: solver for the blocks within the block Jacobi or additive Schwarz
8366: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8367: for details on setting local solver options.
8369: Most users should employ the `KSP` interface for linear solvers
8370: instead of working directly with matrix algebra routines such as this.
8371: See, e.g., `KSPCreate()`.
8373: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8374: @*/
8375: PetscErrorCode MatSetUnfactored(Mat mat)
8376: {
8377: PetscFunctionBegin;
8380: MatCheckPreallocated(mat, 1);
8381: mat->factortype = MAT_FACTOR_NONE;
8382: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8383: PetscUseTypeMethod(mat, setunfactored);
8384: PetscFunctionReturn(PETSC_SUCCESS);
8385: }
8387: /*@
8388: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8389: as the original matrix.
8391: Collective
8393: Input Parameters:
8394: + mat - the original matrix
8395: . isrow - parallel `IS` containing the rows this processor should obtain
8396: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" of the new matrix.
8397: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8399: Output Parameter:
8400: . newmat - the new submatrix, of the same type as the original matrix
8402: Level: advanced
8404: Notes:
8405: The submatrix can be multiplied by vectors that use the same layout as `iscol`.
8407: Some matrix types place restrictions on the row and column indices, such
8408: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8409: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8411: The index sets may not have duplicate entries.
8413: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`, and the
8414: `MatCreateSubMatrix()` routine will create `newmat` for you. Any additional calls
8415: to this routine with a mat of the same nonzero structure and with a cll of `MAT_REUSE_MATRIX`
8416: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8417: you are finished using it.
8419: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8420: the input matrix.
8422: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8424: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8425: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8427: Example usage:
8428: Consider the following 8x8 matrix with 34 nonzero values, which is
8429: assembled across 3 MPI processes. Assume that proc0 owns 3 rows,
8430: proc1 owns 3 rows, and proc2 owns 2 rows. This division can be shown
8431: as follows
8432: .vb
8433: 1 2 0 | 0 3 0 | 0 4
8434: Proc0 0 5 6 | 7 0 0 | 8 0
8435: 9 0 10 | 11 0 0 | 12 0
8436: -------------------------------------
8437: 13 0 14 | 15 16 17 | 0 0
8438: Proc1 0 18 0 | 19 20 21 | 0 0
8439: 0 0 0 | 22 23 0 | 24 0
8440: -------------------------------------
8441: Proc2 25 26 27 | 0 0 28 | 29 0
8442: 30 0 0 | 31 32 33 | 0 34
8443: .ve
8445: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8447: .vb
8448: 2 0 | 0 3 0 | 0
8449: Proc0 5 6 | 7 0 0 | 8
8450: -------------------------------
8451: Proc1 18 0 | 19 20 21 | 0
8452: -------------------------------
8453: Proc2 26 27 | 0 0 28 | 29
8454: 0 0 | 31 32 33 | 0
8455: .ve
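   A minimal sketch of a calling sequence (not tied to the example above; here `A` is an assembled
   parallel matrix and, purely for illustration, each process keeps every second locally owned row
   and the matching columns):
.vb
   IS       isrow, iscol;
   Mat      S;
   PetscInt rstart, rend;

   MatGetOwnershipRange(A, &rstart, &rend);
   ISCreateStride(PetscObjectComm((PetscObject)A), (rend - rstart + 1) / 2, rstart, 2, &isrow);
   ISDuplicate(isrow, &iscol);
   MatCreateSubMatrix(A, isrow, iscol, MAT_INITIAL_MATRIX, &S);
   // ... use S; after the values (not the structure) of A change, refresh S in place
   MatCreateSubMatrix(A, isrow, iscol, MAT_REUSE_MATRIX, &S);
   MatDestroy(&S);
   ISDestroy(&isrow);
   ISDestroy(&iscol);
.ve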
8457: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8458: @*/
8459: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8460: {
8461: PetscMPIInt size;
8462: Mat *local;
8463: IS iscoltmp;
8464: PetscBool flg;
8466: PetscFunctionBegin;
8470: PetscAssertPointer(newmat, 5);
8473: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8474: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8476: MatCheckPreallocated(mat, 1);
8477: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8479: if (!iscol || isrow == iscol) {
8480: PetscBool stride;
8481: PetscMPIInt grabentirematrix = 0, grab;
8482: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8483: if (stride) {
8484: PetscInt first, step, n, rstart, rend;
8485: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8486: if (step == 1) {
8487: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8488: if (rstart == first) {
8489: PetscCall(ISGetLocalSize(isrow, &n));
8490: if (n == rend - rstart) grabentirematrix = 1;
8491: }
8492: }
8493: }
8494: PetscCallMPI(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8495: if (grab) {
8496: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8497: if (cll == MAT_INITIAL_MATRIX) {
8498: *newmat = mat;
8499: PetscCall(PetscObjectReference((PetscObject)mat));
8500: }
8501: PetscFunctionReturn(PETSC_SUCCESS);
8502: }
8503: }
8505: if (!iscol) {
8506: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8507: } else {
8508: iscoltmp = iscol;
8509: }
8511: /* if original matrix is on just one processor then use submatrix generated */
8512: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8513: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8514: goto setproperties;
8515: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8516: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8517: *newmat = *local;
8518: PetscCall(PetscFree(local));
8519: goto setproperties;
8520: } else if (!mat->ops->createsubmatrix) {
8521: /* Create a new matrix type that implements the operation using the full matrix */
8522: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8523: switch (cll) {
8524: case MAT_INITIAL_MATRIX:
8525: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8526: break;
8527: case MAT_REUSE_MATRIX:
8528: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8529: break;
8530: default:
8531: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8532: }
8533: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8534: goto setproperties;
8535: }
8537: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8538: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8539: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8541: setproperties:
8542: if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
8543: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8544: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8545: }
8546: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8547: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8548: if (!iscol || isrow == iscol) PetscCall(MatSelectVariableBlockSizes(*newmat, mat, isrow));
8549: PetscFunctionReturn(PETSC_SUCCESS);
8550: }
8552: /*@
8553: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8555: Not Collective
8557: Input Parameters:
8558: + A - the matrix we wish to propagate options from
8559: - B - the matrix we wish to propagate options to
8561: Level: beginner
8563: Note:
8564: Propagates the options associated with `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8566: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8567: @*/
8568: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8569: {
8570: PetscFunctionBegin;
8573: B->symmetry_eternal = A->symmetry_eternal;
8574: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8575: B->symmetric = A->symmetric;
8576: B->structurally_symmetric = A->structurally_symmetric;
8577: B->spd = A->spd;
8578: B->hermitian = A->hermitian;
8579: PetscFunctionReturn(PETSC_SUCCESS);
8580: }
8582: /*@
8583: MatStashSetInitialSize - sets the sizes of the matrix stash, which is
8584: used during the assembly process to store values that belong to
8585: other processors.
8587: Not Collective
8589: Input Parameters:
8590: + mat - the matrix
8591: . size - the initial size of the stash
8592: - bsize - the initial size of the block stash (if used)
8594: Options Database Keys:
8595: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8596: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8598: Level: intermediate
8600: Notes:
8601: The block-stash is used for values set with `MatSetValuesBlocked()` while
8602: the stash is used for values set with `MatSetValues()`
8604: Run with the option -info and look for output of the form
8605: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs
8606: to determine an appropriate value, MM, to use for size, and for output of the form
8607: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs
8608: to determine an appropriate value, BMM, to use for bsize
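   For example, to pre-size the stash before an assembly that is known to generate many
   off-process entries (the value 10000 below is purely illustrative):
.vb
   MatStashSetInitialSize(mat, 10000, 0);
   // ... MatSetValues() calls that produce off-process entries ...
   MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
   MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);
.ve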
8610: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8611: @*/
8612: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8613: {
8614: PetscFunctionBegin;
8617: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8618: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8619: PetscFunctionReturn(PETSC_SUCCESS);
8620: }
8622: /*@
8623: MatInterpolateAdd - $w = y + A*x$ or $w = y + A^T*x$ depending on the shape of
8624: the matrix
8626: Neighbor-wise Collective
8628: Input Parameters:
8629: + A - the matrix
8630: . x - the vector to be multiplied by the interpolation operator
8631: - y - the vector to be added to the result
8633: Output Parameter:
8634: . w - the resulting vector
8636: Level: intermediate
8638: Notes:
8639: `w` may be the same vector as `y`.
8641: This allows one to use either the restriction or interpolation (its transpose)
8642: matrix to do the interpolation
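   For instance, with a single interpolation matrix `P` (assumed here to have fine-by-coarse
   dimensions), both directions of a multigrid transfer can be written as:
.vb
   MatInterpolateAdd(P, xcoarse, yfine, wfine);     // wfine   = yfine   + P * xcoarse
   MatInterpolateAdd(P, xfine, ycoarse, wcoarse);   // wcoarse = ycoarse + P^T * xfine
.ve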
8644: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8645: @*/
8646: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8647: {
8648: PetscInt M, N, Ny;
8650: PetscFunctionBegin;
8655: PetscCall(MatGetSize(A, &M, &N));
8656: PetscCall(VecGetSize(y, &Ny));
8657: if (M == Ny) {
8658: PetscCall(MatMultAdd(A, x, y, w));
8659: } else {
8660: PetscCall(MatMultTransposeAdd(A, x, y, w));
8661: }
8662: PetscFunctionReturn(PETSC_SUCCESS);
8663: }
8665: /*@
8666: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8667: the matrix
8669: Neighbor-wise Collective
8671: Input Parameters:
8672: + A - the matrix
8673: - x - the vector to be interpolated
8675: Output Parameter:
8676: . y - the resulting vector
8678: Level: intermediate
8680: Note:
8681: This allows one to use either the restriction or interpolation (its transpose)
8682: matrix to do the interpolation
8684: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8685: @*/
8686: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8687: {
8688: PetscInt M, N, Ny;
8690: PetscFunctionBegin;
8694: PetscCall(MatGetSize(A, &M, &N));
8695: PetscCall(VecGetSize(y, &Ny));
8696: if (M == Ny) {
8697: PetscCall(MatMult(A, x, y));
8698: } else {
8699: PetscCall(MatMultTranspose(A, x, y));
8700: }
8701: PetscFunctionReturn(PETSC_SUCCESS);
8702: }
8704: /*@
8705: MatRestrict - $y = A*x$ or $A^T*x$
8707: Neighbor-wise Collective
8709: Input Parameters:
8710: + A - the matrix
8711: - x - the vector to be restricted
8713: Output Parameter:
8714: . y - the resulting vector
8716: Level: intermediate
8718: Note:
8719: This allows one to use either the restriction or interpolation (its transpose)
8720: matrix to do the restriction
8722: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8723: @*/
8724: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8725: {
8726: PetscInt M, N, Nx;
8728: PetscFunctionBegin;
8732: PetscCall(MatGetSize(A, &M, &N));
8733: PetscCall(VecGetSize(x, &Nx));
8734: if (M == Nx) {
8735: PetscCall(MatMultTranspose(A, x, y));
8736: } else {
8737: PetscCall(MatMult(A, x, y));
8738: }
8739: PetscFunctionReturn(PETSC_SUCCESS);
8740: }
8742: /*@
8743: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8745: Neighbor-wise Collective
8747: Input Parameters:
8748: + A - the matrix
8749: . x - the input dense matrix to be multiplied
8750: - w - the input dense matrix to be added to the result
8752: Output Parameter:
8753: . y - the output dense matrix
8755: Level: intermediate
8757: Note:
8758: This allows one to use either the restriction or interpolation (its transpose)
8759: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8760: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8762: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8763: @*/
8764: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8765: {
8766: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8767: PetscBool trans = PETSC_TRUE;
8768: MatReuse reuse = MAT_INITIAL_MATRIX;
8770: PetscFunctionBegin;
8776: PetscCall(MatGetSize(A, &M, &N));
8777: PetscCall(MatGetSize(x, &Mx, &Nx));
8778: if (N == Mx) trans = PETSC_FALSE;
8779: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8780: Mo = trans ? N : M;
8781: if (*y) {
8782: PetscCall(MatGetSize(*y, &My, &Ny));
8783: if (Mo == My && Nx == Ny) {
8784: reuse = MAT_REUSE_MATRIX;
8785: } else {
8786: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8787: PetscCall(MatDestroy(y));
8788: }
8789: }
8791: if (w && *y == w) { /* this is to minimize changes in PCMG */
8792: PetscBool flg;
8794: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8795: if (w) {
8796: PetscInt My, Ny, Mw, Nw;
8798: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8799: PetscCall(MatGetSize(*y, &My, &Ny));
8800: PetscCall(MatGetSize(w, &Mw, &Nw));
8801: if (!flg || My != Mw || Ny != Nw) w = NULL;
8802: }
8803: if (!w) {
8804: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8805: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8806: PetscCall(PetscObjectDereference((PetscObject)w));
8807: } else {
8808: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8809: }
8810: }
8811: if (!trans) {
8812: PetscCall(MatMatMult(A, x, reuse, PETSC_DETERMINE, y));
8813: } else {
8814: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DETERMINE, y));
8815: }
8816: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8817: PetscFunctionReturn(PETSC_SUCCESS);
8818: }
8820: /*@
8821: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8823: Neighbor-wise Collective
8825: Input Parameters:
8826: + A - the matrix
8827: - x - the input dense matrix
8829: Output Parameter:
8830: . y - the output dense matrix
8832: Level: intermediate
8834: Note:
8835: This allows one to use either the restriction or interpolation (its transpose)
8836: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8837: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8839: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8840: @*/
8841: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8842: {
8843: PetscFunctionBegin;
8844: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8845: PetscFunctionReturn(PETSC_SUCCESS);
8846: }
8848: /*@
8849: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8851: Neighbor-wise Collective
8853: Input Parameters:
8854: + A - the matrix
8855: - x - the input dense matrix
8857: Output Parameter:
8858: . y - the output dense matrix
8860: Level: intermediate
8862: Note:
8863: This allows one to use either the restriction or interpolation (its transpose)
8864: matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
8865: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8867: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8868: @*/
8869: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
8870: {
8871: PetscFunctionBegin;
8872: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8873: PetscFunctionReturn(PETSC_SUCCESS);
8874: }
8876: /*@
8877: MatGetNullSpace - retrieves the null space of a matrix.
8879: Logically Collective
8881: Input Parameter:
8882: . mat - the matrix
8883: Output Parameter:
8884: . nullsp - the null space object, `NULL` if not set
8885: Level: developer
8887: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8888: @*/
8889: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8890: {
8891: PetscFunctionBegin;
8893: PetscAssertPointer(nullsp, 2);
8894: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8895: PetscFunctionReturn(PETSC_SUCCESS);
8896: }
8898: /*@C
8899: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
8901: Logically Collective
8903: Input Parameters:
8904: + n - the number of matrices
8905: - mat - the array of matrices
8907: Output Parameters:
8908: . nullsp - an array of length 3 * `n` containing the null space, near null space, and transpose null space of each matrix, with `NULL` for each matrix that does not have one
8910: Level: developer
8912: Note:
8913: Call `MatRestoreNullSpaces()` to provide these to another array of matrices
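   A sketch of transferring the attached null spaces, near null spaces, and transpose null spaces
   from one array of matrices to another of the same length (`matsA`, `matsB`, and `nmat` are
   placeholders for this illustration):
.vb
   MatNullSpace *nullsp;

   MatGetNullSpaces(nmat, matsA, &nullsp);
   MatRestoreNullSpaces(nmat, matsB, &nullsp);   // attaches them to matsB and frees the array
.ve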
8915: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8916: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
8917: @*/
8918: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8919: {
8920: PetscFunctionBegin;
8921: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8922: PetscAssertPointer(mat, 2);
8923: PetscAssertPointer(nullsp, 3);
8925: PetscCall(PetscCalloc1(3 * n, nullsp));
8926: for (PetscInt i = 0; i < n; i++) {
8928: (*nullsp)[i] = mat[i]->nullsp;
8929: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
8930: (*nullsp)[n + i] = mat[i]->nearnullsp;
8931: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
8932: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
8933: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
8934: }
8935: PetscFunctionReturn(PETSC_SUCCESS);
8936: }
8938: /*@C
8939: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
8941: Logically Collective
8943: Input Parameters:
8944: + n - the number of matrices
8945: . mat - the array of matrices
8946: - nullsp - an array of null spaces
8948: Level: developer
8950: Note:
8951: Call `MatGetNullSpaces()` to create `nullsp`
8953: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8954: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
8955: @*/
8956: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8957: {
8958: PetscFunctionBegin;
8959: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8960: PetscAssertPointer(mat, 2);
8961: PetscAssertPointer(nullsp, 3);
8962: PetscAssertPointer(*nullsp, 3);
8964: for (PetscInt i = 0; i < n; i++) {
8966: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
8967: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
8968: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
8969: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
8970: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
8971: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
8972: }
8973: PetscCall(PetscFree(*nullsp));
8974: PetscFunctionReturn(PETSC_SUCCESS);
8975: }
8977: /*@
8978: MatSetNullSpace - attaches a null space to a matrix.
8980: Logically Collective
8982: Input Parameters:
8983: + mat - the matrix
8984: - nullsp - the null space object
8986: Level: advanced
8988: Notes:
8989: This null space is used by the `KSP` linear solvers to solve singular systems.
8991: Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with a `nullsp` of `NULL`
8993: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
8994: to zero but the linear system will still be solved in a least squares sense.
8996: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
8997: for a matrix $A$ mapping $R^n$ to $R^m$ ($m$ rows, $n$ columns), the domain satisfies $R^n = n(A) \oplus R(A^T)$, the direct sum of the null space of $A$ and the range of $A^T$.
8998: Similarly $R^m = n(A^T) \oplus R(A)$. Hence the linear system $A x = b$ has a solution only if $b \in R(A)$ (or, equivalently, $b$ is orthogonal to
8999: $n(A^T)$), and if $x$ is a solution then $x + \alpha v$ is also a solution for any $v \in n(A)$ and scalar $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution
9000: the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$, where $\hat{b}$ is $b$ orthogonalized against $n(A^T)$.
9001: This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
9003: If the matrix is known to be symmetric, because it is a `MATSBAIJ` matrix or because one has called
9004: `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) (or possibly `MAT_SYMMETRY_ETERNAL`), this
9005: routine also automatically calls `MatSetTransposeNullSpace()`.
9007: The user should call `MatNullSpaceDestroy()`.
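   For example, to inform the solver that a matrix coming from a pure Neumann problem has the
   constant vector in its null space (a common case; this sketch assumes `A` is already assembled):
.vb
   MatNullSpace nullsp;

   MatNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_TRUE, 0, NULL, &nullsp);
   MatSetNullSpace(A, nullsp);
   MatNullSpaceDestroy(&nullsp);   // the matrix keeps its own reference
.ve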
9009: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9010: `KSPSetPCSide()`
9011: @*/
9012: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
9013: {
9014: PetscFunctionBegin;
9017: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9018: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
9019: mat->nullsp = nullsp;
9020: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
9021: PetscFunctionReturn(PETSC_SUCCESS);
9022: }
9024: /*@
9025: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
9027: Logically Collective
9029: Input Parameter:
9030: . mat - the matrix
9031: Output Parameter:
9032: . nullsp - the null space object of the transpose of the matrix, `NULL` if not set
9033: Level: developer
9035: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9036: @*/
9037: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
9038: {
9039: PetscFunctionBegin;
9042: PetscAssertPointer(nullsp, 2);
9043: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
9044: PetscFunctionReturn(PETSC_SUCCESS);
9045: }
9047: /*@
9048: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
9050: Logically Collective
9052: Input Parameters:
9053: + mat - the matrix
9054: - nullsp - the null space object
9056: Level: advanced
9058: Notes:
9059: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9061: See `MatSetNullSpace()`
9063: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9064: @*/
9065: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
9066: {
9067: PetscFunctionBegin;
9070: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9071: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
9072: mat->transnullsp = nullsp;
9073: PetscFunctionReturn(PETSC_SUCCESS);
9074: }
9076: /*@
9077: MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions.
9078: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9080: Logically Collective
9082: Input Parameters:
9083: + mat - the matrix
9084: - nullsp - the null space object
9086: Level: advanced
9088: Notes:
9089: Overwrites any previous near null space that may have been attached
9091: You can remove the null space by calling this routine with a `nullsp` of `NULL`
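   For example, to provide rigid body modes to an algebraic multigrid preconditioner built from an
   elasticity operator (a sketch; `coords` is assumed to be a vector of nodal coordinates):
.vb
   MatNullSpace nearnull;

   MatNullSpaceCreateRigidBody(coords, &nearnull);
   MatSetNearNullSpace(A, nearnull);
   MatNullSpaceDestroy(&nearnull);
.ve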
9093: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9094: @*/
9095: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
9096: {
9097: PetscFunctionBegin;
9101: MatCheckPreallocated(mat, 1);
9102: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9103: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
9104: mat->nearnullsp = nullsp;
9105: PetscFunctionReturn(PETSC_SUCCESS);
9106: }
9108: /*@
9109: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9111: Not Collective
9113: Input Parameter:
9114: . mat - the matrix
9116: Output Parameter:
9117: . nullsp - the null space object, `NULL` if not set
9119: Level: advanced
9121: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9122: @*/
9123: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
9124: {
9125: PetscFunctionBegin;
9128: PetscAssertPointer(nullsp, 2);
9129: MatCheckPreallocated(mat, 1);
9130: *nullsp = mat->nearnullsp;
9131: PetscFunctionReturn(PETSC_SUCCESS);
9132: }
9134: /*@
9135: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9137: Collective
9139: Input Parameters:
9140: + mat - the matrix
9141: . row - row/column permutation
9142: - info - information on desired factorization process
9144: Level: developer
9146: Notes:
9147: This is probably truly in-place only when the level of fill is zero; otherwise it allocates
9148: new space to store the factored matrix and frees the previous memory.
9150: Most users should employ the `KSP` interface for linear solvers
9151: instead of working directly with matrix algebra routines such as this.
9152: See, e.g., `KSPCreate()`.
9154: Fortran Note:
9155: A valid (non-null) `info` argument must be provided
9157: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9158: @*/
9159: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9160: {
9161: PetscFunctionBegin;
9165: PetscAssertPointer(info, 3);
9166: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9167: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9168: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9169: MatCheckPreallocated(mat, 1);
9170: PetscUseTypeMethod(mat, iccfactor, row, info);
9171: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9172: PetscFunctionReturn(PETSC_SUCCESS);
9173: }
9175: /*@
9176: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9177: ghosted ones.
9179: Not Collective
9181: Input Parameters:
9182: + mat - the matrix
9183: - diag - the diagonal values, including ghost ones
9185: Level: developer
9187: Notes:
9188: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9190: This allows one to avoid the communication that would otherwise be needed to perform the scaling with `MatDiagonalScale()`
9192: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9193: @*/
9194: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9195: {
9196: PetscMPIInt size;
9198: PetscFunctionBegin;
9203: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9204: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9205: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9206: if (size == 1) {
9207: PetscInt n, m;
9208: PetscCall(VecGetSize(diag, &n));
9209: PetscCall(MatGetSize(mat, NULL, &m));
9210: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9211: PetscCall(MatDiagonalScale(mat, NULL, diag));
9212: } else {
9213: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9214: }
9215: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9216: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9217: PetscFunctionReturn(PETSC_SUCCESS);
9218: }
9220: /*@
9221: MatGetInertia - Gets the inertia from a factored matrix
9223: Collective
9225: Input Parameter:
9226: . mat - the matrix
9228: Output Parameters:
9229: + nneg - number of negative eigenvalues
9230: . nzero - number of zero eigenvalues
9231: - npos - number of positive eigenvalues
9233: Level: advanced
9235: Note:
9236: Matrix must have been factored by `MatCholeskyFactor()`
9238: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9239: @*/
9240: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9241: {
9242: PetscFunctionBegin;
9245: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9246: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9247: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9248: PetscFunctionReturn(PETSC_SUCCESS);
9249: }
9251: /*@C
9252: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9254: Neighbor-wise Collective
9256: Input Parameters:
9257: + mat - the factored matrix obtained with `MatGetFactor()`
9258: - b - the right-hand-side vectors
9260: Output Parameter:
9261: . x - the result vectors
9263: Level: developer
9265: Note:
9266: The vectors `b` and `x` cannot be the same. I.e., one cannot
9267: call `MatSolves`(A,x,x).
9269: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9270: @*/
9271: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9272: {
9273: PetscFunctionBegin;
9276: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9277: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9278: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9280: MatCheckPreallocated(mat, 1);
9281: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9282: PetscUseTypeMethod(mat, solves, b, x);
9283: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9284: PetscFunctionReturn(PETSC_SUCCESS);
9285: }
9287: /*@
9288: MatIsSymmetric - Test whether a matrix is symmetric
9290: Collective
9292: Input Parameters:
9293: + A - the matrix to test
9294: - tol - a difference between a value and its transposed counterpart smaller than this amount counts as equal (use 0.0 for an exact test)
9296: Output Parameter:
9297: . flg - the result
9299: Level: intermediate
9301: Notes:
9302: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9304: If the matrix does not yet know whether it is symmetric, this can be an expensive operation; see also `MatIsSymmetricKnown()`
9306: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9307: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
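   For example, an explicit check up to a small tolerance (the tolerance below is only illustrative):
.vb
   PetscBool symm;

   MatIsSymmetric(A, 1.e-10, &symm);
   if (symm) {
     // ... choose a symmetric solver or preconditioner ...
   }
.ve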
9309: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9310: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9311: @*/
9312: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9313: {
9314: PetscFunctionBegin;
9316: PetscAssertPointer(flg, 3);
9317: if (A->symmetric != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->symmetric);
9318: else {
9319: if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9320: else PetscCall(MatIsTranspose(A, A, tol, flg));
9321: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9322: }
9323: PetscFunctionReturn(PETSC_SUCCESS);
9324: }
9326: /*@
9327: MatIsHermitian - Test whether a matrix is Hermitian
9329: Collective
9331: Input Parameters:
9332: + A - the matrix to test
9333: - tol - a difference between a value and its conjugate-transposed counterpart smaller than this amount counts as equal (use 0.0 for an exact test)
9335: Output Parameter:
9336: . flg - the result
9338: Level: intermediate
9340: Notes:
9341: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9343: If the matrix does not yet know whether it is Hermitian, this can be an expensive operation; see also `MatIsHermitianKnown()`
9345: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9346: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9348: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9349: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9350: @*/
9351: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9352: {
9353: PetscFunctionBegin;
9355: PetscAssertPointer(flg, 3);
9356: if (A->hermitian != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->hermitian);
9357: else {
9358: if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9359: else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9360: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9361: }
9362: PetscFunctionReturn(PETSC_SUCCESS);
9363: }
9365: /*@
9366: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9368: Not Collective
9370: Input Parameter:
9371: . A - the matrix to check
9373: Output Parameters:
9374: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9375: - flg - the result (only valid if set is `PETSC_TRUE`)
9377: Level: advanced
9379: Notes:
9380: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9381: if you want it explicitly checked
9383: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9384: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
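   A typical query pattern, falling back to the explicit (and possibly expensive) check only when
   the symmetry state is unknown (a sketch):
.vb
   PetscBool set, symm;

   MatIsSymmetricKnown(A, &set, &symm);
   if (!set) MatIsSymmetric(A, 0.0, &symm);   // explicit check
.ve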
9386: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9387: @*/
9388: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9389: {
9390: PetscFunctionBegin;
9392: PetscAssertPointer(set, 2);
9393: PetscAssertPointer(flg, 3);
9394: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9395: *set = PETSC_TRUE;
9396: *flg = PetscBool3ToBool(A->symmetric);
9397: } else {
9398: *set = PETSC_FALSE;
9399: }
9400: PetscFunctionReturn(PETSC_SUCCESS);
9401: }
9403: /*@
9404: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9406: Not Collective
9408: Input Parameter:
9409: . A - the matrix to check
9411: Output Parameters:
9412: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9413: - flg - the result (only valid if set is `PETSC_TRUE`)
9415: Level: advanced
9417: Notes:
9418: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9420: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9421: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9423: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9424: @*/
9425: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9426: {
9427: PetscFunctionBegin;
9429: PetscAssertPointer(set, 2);
9430: PetscAssertPointer(flg, 3);
9431: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9432: *set = PETSC_TRUE;
9433: *flg = PetscBool3ToBool(A->spd);
9434: } else {
9435: *set = PETSC_FALSE;
9436: }
9437: PetscFunctionReturn(PETSC_SUCCESS);
9438: }
9440: /*@
9441: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9443: Not Collective
9445: Input Parameter:
9446: . A - the matrix to check
9448: Output Parameters:
9449: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9450: - flg - the result (only valid if set is `PETSC_TRUE`)
9452: Level: advanced
9454: Notes:
9455: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9456: if you want it explicitly checked
9458: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9459: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9461: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9462: @*/
9463: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9464: {
9465: PetscFunctionBegin;
9467: PetscAssertPointer(set, 2);
9468: PetscAssertPointer(flg, 3);
9469: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9470: *set = PETSC_TRUE;
9471: *flg = PetscBool3ToBool(A->hermitian);
9472: } else {
9473: *set = PETSC_FALSE;
9474: }
9475: PetscFunctionReturn(PETSC_SUCCESS);
9476: }
9478: /*@
9479: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9481: Collective
9483: Input Parameter:
9484: . A - the matrix to test
9486: Output Parameter:
9487: . flg - the result
9489: Level: intermediate
9491: Notes:
9492: If the matrix does not yet know whether it is structurally symmetric, this can be an expensive operation; see also `MatIsStructurallySymmetricKnown()`
9494: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9495: symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9497: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9498: @*/
9499: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9500: {
9501: PetscFunctionBegin;
9503: PetscAssertPointer(flg, 2);
9504: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9505: *flg = PetscBool3ToBool(A->structurally_symmetric);
9506: } else {
9507: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9508: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9509: }
9510: PetscFunctionReturn(PETSC_SUCCESS);
9511: }
9513: /*@
9514: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9516: Not Collective
9518: Input Parameter:
9519: . A - the matrix to check
9521: Output Parameters:
9522: + set - `PETSC_TRUE` if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9523: - flg - the result (only valid if set is `PETSC_TRUE`)
9525: Level: advanced
9527: Notes:
9528: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9529: symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9531: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9533: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9534: @*/
9535: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9536: {
9537: PetscFunctionBegin;
9539: PetscAssertPointer(set, 2);
9540: PetscAssertPointer(flg, 3);
9541: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9542: *set = PETSC_TRUE;
9543: *flg = PetscBool3ToBool(A->structurally_symmetric);
9544: } else {
9545: *set = PETSC_FALSE;
9546: }
9547: PetscFunctionReturn(PETSC_SUCCESS);
9548: }
9550: /*@
9551: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9552: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9554: Not Collective
9556: Input Parameter:
9557: . mat - the matrix
9559: Output Parameters:
9560: + nstash - the size of the stash
9561: . reallocs - the number of additional mallocs incurred.
9562: . bnstash - the size of the block stash
9563: - breallocs - the number of additional mallocs incurred in the block stash
9565: Level: advanced
9567: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9568: @*/
9569: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9570: {
9571: PetscFunctionBegin;
9572: PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9573: PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9574: PetscFunctionReturn(PETSC_SUCCESS);
9575: }
9577: /*@
9578: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9579: parallel layout (`PetscLayout`) as the rows and columns of the matrix
9581: Collective
9583: Input Parameter:
9584: . mat - the matrix
9586: Output Parameters:
9587: + right - (optional) vector that the matrix can be multiplied against
9588: - left - (optional) vector that the matrix vector product can be stored in
9590: Level: advanced
9592: Notes:
9593: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9595: These are new vectors that are not owned by the `Mat`; they should be destroyed with `VecDestroy()` when no longer needed
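   A typical use, creating work vectors sized for a matrix-vector product (a sketch):
.vb
   Vec x, y;

   MatCreateVecs(A, &x, &y);   // x is compatible with the columns, y with the rows
   VecSet(x, 1.0);
   MatMult(A, x, y);
   VecDestroy(&x);
   VecDestroy(&y);
.ve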
9597: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9598: @*/
9599: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9600: {
9601: PetscFunctionBegin;
9604: if (mat->ops->getvecs) {
9605: PetscUseTypeMethod(mat, getvecs, right, left);
9606: } else {
9607: if (right) {
9608: PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9609: PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9610: PetscCall(VecSetType(*right, mat->defaultvectype));
9611: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9612: if (mat->boundtocpu && mat->bindingpropagates) {
9613: PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9614: PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9615: }
9616: #endif
9617: }
9618: if (left) {
9619: PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9620: PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9621: PetscCall(VecSetType(*left, mat->defaultvectype));
9622: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9623: if (mat->boundtocpu && mat->bindingpropagates) {
9624: PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9625: PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9626: }
9627: #endif
9628: }
9629: }
9630: PetscFunctionReturn(PETSC_SUCCESS);
9631: }
9633: /*@
9634: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9635: with default values.
9637: Not Collective
9639: Input Parameter:
9640: . info - the `MatFactorInfo` data structure
9642: Level: developer
9644: Notes:
9645: The solvers are generally used through the `KSP` and `PC` objects, for example
9646: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9648: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
9650: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9651: @*/
9652: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9653: {
9654: PetscFunctionBegin;
9655: PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9656: PetscFunctionReturn(PETSC_SUCCESS);
9657: }
9659: /*@
9660: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9662: Collective
9664: Input Parameters:
9665: + mat - the factored matrix
9666: - is - the index set defining the Schur indices (0-based)
9668: Level: advanced
9670: Notes:
9671: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9673: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9675: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
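   A sketch of a typical workflow with `MATSOLVERMUMPS` (the index set `schur_is` selecting the
   Schur rows/columns is assumed to exist already, and `A` to be assembled):
.vb
   Mat           F, S;
   MatFactorInfo info;

   MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F);
   MatFactorSetSchurIS(F, schur_is);               // must be called before the factorization
   MatFactorInfoInitialize(&info);
   MatLUFactorSymbolic(F, A, NULL, NULL, &info);
   MatLUFactorNumeric(F, A, &info);
   MatFactorCreateSchurComplement(F, &S, NULL);    // copy of the Schur complement
.ve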
9677: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9678: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9679: @*/
9680: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9681: {
9682: PetscErrorCode (*f)(Mat, IS);
9684: PetscFunctionBegin;
9689: PetscCheckSameComm(mat, 1, is, 2);
9690: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9691: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9692: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9693: PetscCall(MatDestroy(&mat->schur));
9694: PetscCall((*f)(mat, is));
9695: PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9696: PetscFunctionReturn(PETSC_SUCCESS);
9697: }
9699: /*@
9700: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9702: Logically Collective
9704: Input Parameters:
9705: + F - the factored matrix obtained by calling `MatGetFactor()`
9706: . S - location where to return the Schur complement, can be `NULL`
9707: - status - the status of the Schur complement matrix, can be `NULL`
9709: Level: advanced
9711: Notes:
9712: You must call `MatFactorSetSchurIS()` before calling this routine.
9714: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9716: The routine provides a copy of the Schur matrix stored within the solver data structures.
9717: The caller must destroy the object when it is no longer needed.
9718: If `MatFactorInvertSchurComplement()` has been called, the routine returns the inverse of the Schur complement instead.
9720: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9722: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9724: Developer Note:
9725: The reason this routine exists is that the representation of the Schur complement within the factor matrix may be different from a standard PETSc
9726: matrix representation, and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9728: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9729: @*/
9730: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9731: {
9732: PetscFunctionBegin;
9734: if (S) PetscAssertPointer(S, 2);
9735: if (status) PetscAssertPointer(status, 3);
9736: if (S) {
9737: PetscErrorCode (*f)(Mat, Mat *);
9739: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9740: if (f) {
9741: PetscCall((*f)(F, S));
9742: } else {
9743: PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9744: }
9745: }
9746: if (status) *status = F->schur_status;
9747: PetscFunctionReturn(PETSC_SUCCESS);
9748: }
9750: /*@
9751: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9753: Logically Collective
9755: Input Parameters:
9756: + F - the factored matrix obtained by calling `MatGetFactor()`
9757: . S - location where to return the Schur complement, can be `NULL`
9758: - status - the status of the Schur complement matrix, can be `NULL`
9760: Level: advanced
9762: Notes:
9763: You must call `MatFactorSetSchurIS()` before calling this routine.
9765: Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`
9767: The routine returns the Schur complement stored within the data structures of the solver.
9769: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9771: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9773: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9775: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9777: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9778: @*/
9779: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9780: {
9781: PetscFunctionBegin;
9783: if (S) {
9784: PetscAssertPointer(S, 2);
9785: *S = F->schur;
9786: }
9787: if (status) {
9788: PetscAssertPointer(status, 3);
9789: *status = F->schur_status;
9790: }
9791: PetscFunctionReturn(PETSC_SUCCESS);
9792: }
9794: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9795: {
9796: Mat S = F->schur;
9798: PetscFunctionBegin;
9799: switch (F->schur_status) {
9800: case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9801: case MAT_FACTOR_SCHUR_INVERTED:
9802: if (S) {
9803: S->ops->solve = NULL;
9804: S->ops->matsolve = NULL;
9805: S->ops->solvetranspose = NULL;
9806: S->ops->matsolvetranspose = NULL;
9807: S->ops->solveadd = NULL;
9808: S->ops->solvetransposeadd = NULL;
9809: S->factortype = MAT_FACTOR_NONE;
9810: PetscCall(PetscFree(S->solvertype));
9811: }
9812: case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9813: break;
9814: default:
9815: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9816: }
9817: PetscFunctionReturn(PETSC_SUCCESS);
9818: }
9820: /*@
9821: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9823: Logically Collective
9825: Input Parameters:
9826: + F - the factored matrix obtained by calling `MatGetFactor()`
9827: . S - location where the Schur complement is stored
9828: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9830: Level: advanced
9832: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9833: @*/
9834: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9835: {
9836: PetscFunctionBegin;
9838: if (S) {
9840: *S = NULL;
9841: }
9842: F->schur_status = status;
9843: PetscCall(MatFactorUpdateSchurStatus_Private(F));
9844: PetscFunctionReturn(PETSC_SUCCESS);
9845: }
9847: /*@
9848: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9850: Logically Collective
9852: Input Parameters:
9853: + F - the factored matrix obtained by calling `MatGetFactor()`
9854: . rhs - location where the right-hand side of the Schur complement system is stored
9855: - sol - location where the solution of the Schur complement system has to be returned
9857: Level: advanced
9859: Notes:
9860: The sizes of the vectors should match the size of the Schur complement
9862: Must be called after `MatFactorSetSchurIS()`
9864: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9865: @*/
9866: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9867: {
9868: PetscFunctionBegin;
9875: PetscCheckSameComm(F, 1, rhs, 2);
9876: PetscCheckSameComm(F, 1, sol, 3);
9877: PetscCall(MatFactorFactorizeSchurComplement(F));
9878: switch (F->schur_status) {
9879: case MAT_FACTOR_SCHUR_FACTORED:
9880: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
9881: break;
9882: case MAT_FACTOR_SCHUR_INVERTED:
9883: PetscCall(MatMultTranspose(F->schur, rhs, sol));
9884: break;
9885: default:
9886: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9887: }
9888: PetscFunctionReturn(PETSC_SUCCESS);
9889: }
9891: /*@
9892: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9894: Logically Collective
9896: Input Parameters:
9897: + F - the factored matrix obtained by calling `MatGetFactor()`
9898: . rhs - location where the right-hand side of the Schur complement system is stored
9899: - sol - location where the solution of the Schur complement system has to be returned
9901: Level: advanced
9903: Notes:
9904: The sizes of the vectors should match the size of the Schur complement
9906: Must be called after `MatFactorSetSchurIS()`
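Example Usage:
A minimal sketch (hypothetical factored matrix `F` from `MatGetFactor()`, assumed factored after `MatFactorSetSchurIS()`; the work vectors are sized from the Schur complement; error handling abbreviated):
.vb
  Mat                  S;
  Vec                  rhs, sol;
  MatFactorSchurStatus status;

  PetscCall(MatFactorGetSchurComplement(F, &S, &status)); // F is assumed factored with a Schur IS set
  PetscCall(MatCreateVecs(S, &sol, &rhs));                // vectors with the size of the Schur complement
  PetscCall(MatFactorRestoreSchurComplement(F, &S, status));
  // ... fill rhs ...
  PetscCall(MatFactorSolveSchurComplement(F, rhs, sol));
.ve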
9908: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
9909: @*/
9910: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
9911: {
9912: PetscFunctionBegin;
9919: PetscCheckSameComm(F, 1, rhs, 2);
9920: PetscCheckSameComm(F, 1, sol, 3);
9921: PetscCall(MatFactorFactorizeSchurComplement(F));
9922: switch (F->schur_status) {
9923: case MAT_FACTOR_SCHUR_FACTORED:
9924: PetscCall(MatSolve(F->schur, rhs, sol));
9925: break;
9926: case MAT_FACTOR_SCHUR_INVERTED:
9927: PetscCall(MatMult(F->schur, rhs, sol));
9928: break;
9929: default:
9930: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9931: }
9932: PetscFunctionReturn(PETSC_SUCCESS);
9933: }
9935: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
9936: #if PetscDefined(HAVE_CUDA)
9937: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
9938: #endif
9940: /* Schur status updated in the interface */
9941: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
9942: {
9943: Mat S = F->schur;
9945: PetscFunctionBegin;
9946: if (S) {
9947: PetscMPIInt size;
9948: PetscBool isdense, isdensecuda;
9950: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
9951: PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
9952: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
9953: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
9954: PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
9955: PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
9956: if (isdense) {
9957: PetscCall(MatSeqDenseInvertFactors_Private(S));
9958: } else if (isdensecuda) {
9959: #if defined(PETSC_HAVE_CUDA)
9960: PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
9961: #endif
9962: }
9963:     // TODO: add a HIP code path analogous to the CUDA one above
9964: PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
9965: }
9966: PetscFunctionReturn(PETSC_SUCCESS);
9967: }
9969: /*@
9970: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
9972: Logically Collective
9974: Input Parameter:
9975: . F - the factored matrix obtained by calling `MatGetFactor()`
9977: Level: advanced
9979: Notes:
9980: Must be called after `MatFactorSetSchurIS()`.
9982: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
9984: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
9985: @*/
9986: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
9987: {
9988: PetscFunctionBegin;
9991: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
9992: PetscCall(MatFactorFactorizeSchurComplement(F));
9993: PetscCall(MatFactorInvertSchurComplement_Private(F));
9994: F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
9995: PetscFunctionReturn(PETSC_SUCCESS);
9996: }
9998: /*@
9999: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
10001: Logically Collective
10003: Input Parameter:
10004: . F - the factored matrix obtained by calling `MatGetFactor()`
10006: Level: advanced
10008: Note:
10009: Must be called after `MatFactorSetSchurIS()`
10011: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
10012: @*/
10013: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
10014: {
10015: MatFactorInfo info;
10017: PetscFunctionBegin;
10020: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
10021: PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
10022: PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
10023: if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
10024: PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
10025: } else {
10026: PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
10027: }
10028: PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
10029: F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
10030: PetscFunctionReturn(PETSC_SUCCESS);
10031: }
10033: /*@
10034: MatPtAP - Creates the matrix product $C = P^T * A * P$
10036: Neighbor-wise Collective
10038: Input Parameters:
10039: + A - the matrix
10040: . P - the projection matrix
10041: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10042: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10043: if the result is a dense matrix this is irrelevant
10045: Output Parameter:
10046: . C - the product matrix
10048: Level: intermediate
10050: Notes:
10051: C will be created and must be destroyed by the user with `MatDestroy()`.
10053: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10055: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10057: Developer Note:
10058: For matrix types without a specialized implementation, the function falls back to `MatMatMult()` followed by `MatTransposeMatMult()`.
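Example Usage:
A minimal sketch (hypothetical assembled matrices `A` and `P`; error handling abbreviated):
.vb
  Mat C; // A, P: previously created and assembled matrices (assumed)

  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); // C = P^T * A * P
  // ... the values of A or P change, but not their nonzero patterns ...
  PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, PETSC_CURRENT, &C));     // recompute C in place
  PetscCall(MatDestroy(&C));
.ve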
10060: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10061: @*/
10062: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
10063: {
10064: PetscFunctionBegin;
10065: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10066: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10068: if (scall == MAT_INITIAL_MATRIX) {
10069: PetscCall(MatProductCreate(A, P, NULL, C));
10070: PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
10071: PetscCall(MatProductSetAlgorithm(*C, "default"));
10072: PetscCall(MatProductSetFill(*C, fill));
10074: (*C)->product->api_user = PETSC_TRUE;
10075: PetscCall(MatProductSetFromOptions(*C));
10076: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
10077: PetscCall(MatProductSymbolic(*C));
10078: } else { /* scall == MAT_REUSE_MATRIX */
10079: PetscCall(MatProductReplaceMats(A, P, NULL, *C));
10080: }
10082: PetscCall(MatProductNumeric(*C));
10083: (*C)->symmetric = A->symmetric;
10084: (*C)->spd = A->spd;
10085: PetscFunctionReturn(PETSC_SUCCESS);
10086: }
10088: /*@
10089: MatRARt - Creates the matrix product $C = R * A * R^T$
10091: Neighbor-wise Collective
10093: Input Parameters:
10094: + A - the matrix
10095: . R - the projection matrix
10096: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10097: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10098: if the result is a dense matrix this is irrelevant
10100: Output Parameter:
10101: . C - the product matrix
10103: Level: intermediate
10105: Notes:
10106: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10108: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10110: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10111: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10112: the parallel `MatRARt()` is implemented by computing the explicit transpose of `R`, which can be very expensive.
10113: We recommend using `MatPtAP()` when possible.
10115: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10117: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10118: @*/
10119: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10120: {
10121: PetscFunctionBegin;
10122: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10123: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10125: if (scall == MAT_INITIAL_MATRIX) {
10126: PetscCall(MatProductCreate(A, R, NULL, C));
10127: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10128: PetscCall(MatProductSetAlgorithm(*C, "default"));
10129: PetscCall(MatProductSetFill(*C, fill));
10131: (*C)->product->api_user = PETSC_TRUE;
10132: PetscCall(MatProductSetFromOptions(*C));
10133: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10134: PetscCall(MatProductSymbolic(*C));
10135: } else { /* scall == MAT_REUSE_MATRIX */
10136: PetscCall(MatProductReplaceMats(A, R, NULL, *C));
10137: }
10139: PetscCall(MatProductNumeric(*C));
10140: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10141: PetscFunctionReturn(PETSC_SUCCESS);
10142: }
10144: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10145: {
10146: PetscBool flg = PETSC_TRUE;
10148: PetscFunctionBegin;
10149: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10150: if (scall == MAT_INITIAL_MATRIX) {
10151: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10152: PetscCall(MatProductCreate(A, B, NULL, C));
10153: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10154: PetscCall(MatProductSetFill(*C, fill));
10155: } else { /* scall == MAT_REUSE_MATRIX */
10156: Mat_Product *product = (*C)->product;
10158: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
10159: if (flg && product && product->type != ptype) {
10160: PetscCall(MatProductClear(*C));
10161: product = NULL;
10162: }
10163: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10164:     if (!product) { /* the user provided the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10165: PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10166: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10167: product = (*C)->product;
10168: product->fill = fill;
10169: product->clear = PETSC_TRUE;
10170: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10171: flg = PETSC_FALSE;
10172: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10173: }
10174: }
10175: if (flg) {
10176: (*C)->product->api_user = PETSC_TRUE;
10177: PetscCall(MatProductSetType(*C, ptype));
10178: PetscCall(MatProductSetFromOptions(*C));
10179: PetscCall(MatProductSymbolic(*C));
10180: }
10181: PetscCall(MatProductNumeric(*C));
10182: PetscFunctionReturn(PETSC_SUCCESS);
10183: }
10185: /*@
10186: MatMatMult - Performs matrix-matrix multiplication C=A*B.
10188: Neighbor-wise Collective
10190: Input Parameters:
10191: + A - the left matrix
10192: . B - the right matrix
10193: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10194: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10195: if the result is a dense matrix this is irrelevant
10197: Output Parameter:
10198: . C - the product matrix
10200: Notes:
10201: Unless `scall` is `MAT_REUSE_MATRIX`, `C` will be created.
10203: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10204: call to this function with `MAT_INITIAL_MATRIX`.
10206: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value actually needed.
10208: In the special case where matrix `B` (and hence `C`) are dense you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10209: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10211: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10213: Example of Usage:
10214: .vb
10215: MatProductCreate(A,B,NULL,&C);
10216: MatProductSetType(C,MATPRODUCT_AB);
10217: MatProductSymbolic(C);
10218: MatProductNumeric(C); // compute C=A * B
10219: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10220: MatProductNumeric(C);
10221: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10222: MatProductNumeric(C);
10223: .ve
10225: Level: intermediate
10227: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10228: @*/
10229: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10230: {
10231: PetscFunctionBegin;
10232: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10233: PetscFunctionReturn(PETSC_SUCCESS);
10234: }
10236: /*@
10237: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10239: Neighbor-wise Collective
10241: Input Parameters:
10242: + A - the left matrix
10243: . B - the right matrix
10244: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10245: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10247: Output Parameter:
10248: . C - the product matrix
10250: Options Database Key:
10251: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10252: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10253: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10255: Level: intermediate
10257: Notes:
10258: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10260: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10262: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10263: actually needed.
10265: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10266: and for pairs of `MATMPIDENSE` matrices.
10268: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`
10270: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10272: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10273: @*/
10274: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10275: {
10276: PetscFunctionBegin;
10277: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10278: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10279: PetscFunctionReturn(PETSC_SUCCESS);
10280: }
10282: /*@
10283: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10285: Neighbor-wise Collective
10287: Input Parameters:
10288: + A - the left matrix
10289: . B - the right matrix
10290: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10291: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10293: Output Parameter:
10294: . C - the product matrix
10296: Level: intermediate
10298: Notes:
10299: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10301: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
10303: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`
10305: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10306: actually needed.
10308: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10309: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10311: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
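Example Usage:
A minimal sketch (hypothetical assembled matrices `A` and `B` with compatible row layouts; error handling abbreviated):
.vb
  Mat C; // A, B: previously created and assembled matrices (assumed)

  PetscCall(MatTransposeMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); // C = A^T * B
  // ... the values of A or B change, but not their nonzero patterns ...
  PetscCall(MatTransposeMatMult(A, B, MAT_REUSE_MATRIX, PETSC_CURRENT, &C));
  PetscCall(MatDestroy(&C));
.ve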
10313: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10314: @*/
10315: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10316: {
10317: PetscFunctionBegin;
10318: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10319: PetscFunctionReturn(PETSC_SUCCESS);
10320: }
10322: /*@
10323: MatMatMatMult - Performs matrix-matrix-matrix multiplication D=A*B*C.
10325: Neighbor-wise Collective
10327: Input Parameters:
10328: + A - the left matrix
10329: . B - the middle matrix
10330: . C - the right matrix
10331: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10332: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10333: if the result is a dense matrix this is irrelevant
10335: Output Parameter:
10336: . D - the product matrix
10338: Level: intermediate
10340: Notes:
10341: Unless `scall` is `MAT_REUSE_MATRIX` `D` will be created.
10343: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10345: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`
10347: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10348: actually needed.
10350: If you have many matrices with the same non-zero structure to multiply, you
10351: should use `MAT_REUSE_MATRIX` in all calls but the first
10353: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10355: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10356: @*/
10357: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10358: {
10359: PetscFunctionBegin;
10360: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10361: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10363: if (scall == MAT_INITIAL_MATRIX) {
10364: PetscCall(MatProductCreate(A, B, C, D));
10365: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10366: PetscCall(MatProductSetAlgorithm(*D, "default"));
10367: PetscCall(MatProductSetFill(*D, fill));
10369: (*D)->product->api_user = PETSC_TRUE;
10370: PetscCall(MatProductSetFromOptions(*D));
10371: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10372: ((PetscObject)C)->type_name);
10373: PetscCall(MatProductSymbolic(*D));
10374: } else { /* user may change input matrices when REUSE */
10375: PetscCall(MatProductReplaceMats(A, B, C, *D));
10376: }
10377: PetscCall(MatProductNumeric(*D));
10378: PetscFunctionReturn(PETSC_SUCCESS);
10379: }
10381: /*@
10382: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10384: Collective
10386: Input Parameters:
10387: + mat - the matrix
10388: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10389: . subcomm - MPI communicator split from the communicator in which mat resides (or `MPI_COMM_NULL` if nsubcomm is used)
10390: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10392: Output Parameter:
10393: . matredundant - redundant matrix
10395: Level: advanced
10397: Notes:
10398: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10399: original matrix has not changed from that last call to `MatCreateRedundantMatrix()`.
10401: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10402: calling it.
10404: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
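Example Usage:
A minimal sketch creating two redundant copies and letting the routine build the subcommunicators (hypothetical assembled matrix `mat`; error handling abbreviated):
.vb
  Mat redundant; // mat: previously created and assembled matrix (assumed)

  PetscCall(MatCreateRedundantMatrix(mat, 2, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &redundant));
  // ... the values of mat change, but not its nonzero structure ...
  PetscCall(MatCreateRedundantMatrix(mat, 2, MPI_COMM_NULL, MAT_REUSE_MATRIX, &redundant));
  PetscCall(MatDestroy(&redundant));
.ve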
10406: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10407: @*/
10408: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10409: {
10410: MPI_Comm comm;
10411: PetscMPIInt size;
10412: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10413: Mat_Redundant *redund = NULL;
10414: PetscSubcomm psubcomm = NULL;
10415: MPI_Comm subcomm_in = subcomm;
10416: Mat *matseq;
10417: IS isrow, iscol;
10418: PetscBool newsubcomm = PETSC_FALSE;
10420: PetscFunctionBegin;
10422: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10423: PetscAssertPointer(*matredundant, 5);
10425: }
10427: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10428: if (size == 1 || nsubcomm == 1) {
10429: if (reuse == MAT_INITIAL_MATRIX) {
10430: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10431: } else {
10432: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10433: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10434: }
10435: PetscFunctionReturn(PETSC_SUCCESS);
10436: }
10438: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10439: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10440: MatCheckPreallocated(mat, 1);
10442: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10443: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10444: /* create psubcomm, then get subcomm */
10445: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10446: PetscCallMPI(MPI_Comm_size(comm, &size));
10447:     PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);
10449: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10450: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10451: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10452: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10453: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10454: newsubcomm = PETSC_TRUE;
10455: PetscCall(PetscSubcommDestroy(&psubcomm));
10456: }
10458: /* get isrow, iscol and a local sequential matrix matseq[0] */
10459: if (reuse == MAT_INITIAL_MATRIX) {
10460: mloc_sub = PETSC_DECIDE;
10461: nloc_sub = PETSC_DECIDE;
10462: if (bs < 1) {
10463: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10464: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10465: } else {
10466: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10467: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10468: }
10469: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10470: rstart = rend - mloc_sub;
10471: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10472: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10473: PetscCall(ISSetIdentity(iscol));
10474: } else { /* reuse == MAT_REUSE_MATRIX */
10475: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10476: /* retrieve subcomm */
10477: PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10478: redund = (*matredundant)->redundant;
10479: isrow = redund->isrow;
10480: iscol = redund->iscol;
10481: matseq = redund->matseq;
10482: }
10483: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10485: /* get matredundant over subcomm */
10486: if (reuse == MAT_INITIAL_MATRIX) {
10487: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10489: /* create a supporting struct and attach it to C for reuse */
10490: PetscCall(PetscNew(&redund));
10491: (*matredundant)->redundant = redund;
10492: redund->isrow = isrow;
10493: redund->iscol = iscol;
10494: redund->matseq = matseq;
10495: if (newsubcomm) {
10496: redund->subcomm = subcomm;
10497: } else {
10498: redund->subcomm = MPI_COMM_NULL;
10499: }
10500: } else {
10501: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10502: }
10503: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10504: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10505: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10506: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10507: }
10508: #endif
10509: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10510: PetscFunctionReturn(PETSC_SUCCESS);
10511: }
10513: /*@C
10514: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10515: a given `Mat`. Each submatrix can span multiple procs.
10517: Collective
10519: Input Parameters:
10520: + mat - the matrix
10521: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10522: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10524: Output Parameter:
10525: . subMat - parallel sub-matrices each spanning a given `subcomm`
10527: Level: advanced
10529: Notes:
10530: The submatrix partition across processes is dictated by `subComm`, a
10531: communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10532: is not restricted to be grouped with consecutive original MPI processes.
10534: Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10535: maps directly to the layout of the original matrix [with respect to the local
10536: row and column partitioning]. So the original 'DiagonalMat' naturally maps
10537: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10538: the `subMat`. However, the offDiagMat loses some columns - and these are
10539: reconstructed with `MatSetValues()`
10541: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10543: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10544: @*/
10545: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10546: {
10547: PetscMPIInt commsize, subCommSize;
10549: PetscFunctionBegin;
10550: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10551: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10552:   PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize);
10554: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10555: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10556: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10557: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10558: PetscFunctionReturn(PETSC_SUCCESS);
10559: }
10561: /*@
10562: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10564: Not Collective
10566: Input Parameters:
10567: + mat - matrix to extract local submatrix from
10568: . isrow - local row indices for submatrix
10569: - iscol - local column indices for submatrix
10571: Output Parameter:
10572: . submat - the submatrix
10574: Level: intermediate
10576: Notes:
10577: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10579: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10580: the same as that of `mat`, it may be `PETSC_COMM_SELF`, or it may be some other sub-communicator of `mat`'s.
10582: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10583: `MatSetValuesBlockedLocal()` will also be implemented.
10585: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10586: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
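Example Usage:
A minimal sketch (hypothetical matrix `mat` with a local-to-global mapping, e.g. obtained from `DMCreateMatrix()`, and hypothetical index sets `isrow` and `iscol` in local numbering; error handling abbreviated):
.vb
  Mat         sub;
  PetscInt    row = 0, col = 0; // indices local to the submatrix (assumed valid)
  PetscScalar v   = 1.0;

  PetscCall(MatGetLocalSubMatrix(mat, isrow, iscol, &sub));
  PetscCall(MatSetValuesLocal(sub, 1, &row, 1, &col, &v, ADD_VALUES));
  PetscCall(MatRestoreLocalSubMatrix(mat, isrow, iscol, &sub));
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve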
10588: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10589: @*/
10590: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10591: {
10592: PetscFunctionBegin;
10596: PetscCheckSameComm(isrow, 2, iscol, 3);
10597: PetscAssertPointer(submat, 4);
10598: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10600: if (mat->ops->getlocalsubmatrix) {
10601: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10602: } else {
10603: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10604: }
10605: PetscFunctionReturn(PETSC_SUCCESS);
10606: }
10608: /*@
10609: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10611: Not Collective
10613: Input Parameters:
10614: + mat - matrix to extract local submatrix from
10615: . isrow - local row indices for submatrix
10616: . iscol - local column indices for submatrix
10617: - submat - the submatrix
10619: Level: intermediate
10621: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10622: @*/
10623: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10624: {
10625: PetscFunctionBegin;
10629: PetscCheckSameComm(isrow, 2, iscol, 3);
10630: PetscAssertPointer(submat, 4);
10633: if (mat->ops->restorelocalsubmatrix) {
10634: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10635: } else {
10636: PetscCall(MatDestroy(submat));
10637: }
10638: *submat = NULL;
10639: PetscFunctionReturn(PETSC_SUCCESS);
10640: }
10642: /*@
10643: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10645: Collective
10647: Input Parameter:
10648: . mat - the matrix
10650: Output Parameter:
10651: . is - if any rows have zero diagonals this contains the list of them
10653: Level: developer
10655: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10656: @*/
10657: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10658: {
10659: PetscFunctionBegin;
10662: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10663: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10665: if (!mat->ops->findzerodiagonals) {
10666: Vec diag;
10667: const PetscScalar *a;
10668: PetscInt *rows;
10669: PetscInt rStart, rEnd, r, nrow = 0;
10671: PetscCall(MatCreateVecs(mat, &diag, NULL));
10672: PetscCall(MatGetDiagonal(mat, diag));
10673: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10674: PetscCall(VecGetArrayRead(diag, &a));
10675: for (r = 0; r < rEnd - rStart; ++r)
10676: if (a[r] == 0.0) ++nrow;
10677: PetscCall(PetscMalloc1(nrow, &rows));
10678: nrow = 0;
10679: for (r = 0; r < rEnd - rStart; ++r)
10680: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10681: PetscCall(VecRestoreArrayRead(diag, &a));
10682: PetscCall(VecDestroy(&diag));
10683: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10684: } else {
10685: PetscUseTypeMethod(mat, findzerodiagonals, is);
10686: }
10687: PetscFunctionReturn(PETSC_SUCCESS);
10688: }
10690: /*@
10691: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10693: Collective
10695: Input Parameter:
10696: . mat - the matrix
10698: Output Parameter:
10699: . is - contains the list of rows with off block diagonal entries
10701: Level: developer
10703: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10704: @*/
10705: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10706: {
10707: PetscFunctionBegin;
10710: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10711: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10713: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10714: PetscFunctionReturn(PETSC_SUCCESS);
10715: }
10717: /*@C
10718: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10720: Collective; No Fortran Support
10722: Input Parameter:
10723: . mat - the matrix
10725: Output Parameter:
10726: . values - the block inverses in column major order (FORTRAN-like)
10728: Level: advanced
10730: Notes:
10731: The size of the blocks is determined by the block size of the matrix.
10733: The blocks never overlap between two MPI processes; use `MatInvertVariableBlockEnvelope()` for that case
10735: The blocks all have the same size; use `MatInvertVariableBlockDiagonal()` for variable block sizes
10737: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10738: @*/
10739: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
10740: {
10741: PetscFunctionBegin;
10743: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10744: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10745: PetscUseTypeMethod(mat, invertblockdiagonal, values);
10746: PetscFunctionReturn(PETSC_SUCCESS);
10747: }
10749: /*@
10750: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10752: Collective; No Fortran Support
10754: Input Parameters:
10755: + mat - the matrix
10756: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10757: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10759: Output Parameter:
10760: . values - the block inverses in column major order (FORTRAN-like)
10762: Level: advanced
10764: Notes:
10765: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10767: The blocks never overlap between two MPI processes; use `MatInvertVariableBlockEnvelope()` for that case
10769: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10770: @*/
10771: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
10772: {
10773: PetscFunctionBegin;
10775: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10776: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10777: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10778: PetscFunctionReturn(PETSC_SUCCESS);
10779: }
10781: /*@
10782: MatInvertBlockDiagonalMat - set the values of matrix C to be the inverted block diagonal of matrix A
10784: Collective
10786: Input Parameters:
10787: + A - the matrix
10788: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10790: Level: advanced
10792: Note:
10793: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
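Example Usage:
A minimal sketch (hypothetical assembled matrix `A` with a block size set; setting the type of `C` is shown, although the description above says it is optional; error handling abbreviated):
.vb
  Mat C; // A: previously created and assembled matrix (assumed)

  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
  PetscCall(MatSetType(C, MATAIJ));
  PetscCall(MatInvertBlockDiagonalMat(A, C)); // C now holds the inverted diagonal blocks of A
.ve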
10795: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10796: @*/
10797: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10798: {
10799: const PetscScalar *vals;
10800: PetscInt *dnnz;
10801: PetscInt m, rstart, rend, bs, i, j;
10803: PetscFunctionBegin;
10804: PetscCall(MatInvertBlockDiagonal(A, &vals));
10805: PetscCall(MatGetBlockSize(A, &bs));
10806: PetscCall(MatGetLocalSize(A, &m, NULL));
10807: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10808: PetscCall(MatSetBlockSizes(C, A->rmap->bs, A->cmap->bs));
10809: PetscCall(PetscMalloc1(m / bs, &dnnz));
10810: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10811: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10812: PetscCall(PetscFree(dnnz));
10813: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10814: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10815: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10816: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10817: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10818: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10819: PetscFunctionReturn(PETSC_SUCCESS);
10820: }
10822: /*@
10823: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10824: via `MatTransposeColoringCreate()`.
10826: Collective
10828: Input Parameter:
10829: . c - coloring context
10831: Level: intermediate
10833: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10834: @*/
10835: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10836: {
10837: MatTransposeColoring matcolor = *c;
10839: PetscFunctionBegin;
10840: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10841: if (--((PetscObject)matcolor)->refct > 0) {
10842: matcolor = NULL;
10843: PetscFunctionReturn(PETSC_SUCCESS);
10844: }
10846: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10847: PetscCall(PetscFree(matcolor->rows));
10848: PetscCall(PetscFree(matcolor->den2sp));
10849: PetscCall(PetscFree(matcolor->colorforcol));
10850: PetscCall(PetscFree(matcolor->columns));
10851: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10852: PetscCall(PetscHeaderDestroy(c));
10853: PetscFunctionReturn(PETSC_SUCCESS);
10854: }
10856: /*@
10857: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
10858: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
10859: `MatTransposeColoring` to sparse `B`.
10861: Collective
10863: Input Parameters:
10864: + coloring - coloring context created with `MatTransposeColoringCreate()`
10865: - B - sparse matrix
10867: Output Parameter:
10868: . Btdense - dense matrix $B^T$
10870: Level: developer
10872: Note:
10873: These are used internally for some implementations of `MatRARt()`
10875: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10876: @*/
10877: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
10878: {
10879: PetscFunctionBegin;
10884: PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
10885: PetscFunctionReturn(PETSC_SUCCESS);
10886: }
10888: /*@
10889: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
10890: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
10891: in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recover the sparse matrix
10892: $C_{sp}$ from $C_{den}$.
10894: Collective
10896: Input Parameters:
10897: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10898: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
10900: Output Parameter:
10901: . Csp - sparse matrix
10903: Level: developer
10905: Note:
10906: These are used internally for some implementations of `MatRARt()`
10908: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
10909: @*/
10910: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
10911: {
10912: PetscFunctionBegin;
10917: PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
10918: PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
10919: PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
10920: PetscFunctionReturn(PETSC_SUCCESS);
10921: }
10923: /*@
10924: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
10926: Collective
10928: Input Parameters:
10929: + mat - the matrix product C
10930: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
10932: Output Parameter:
10933: . color - the new coloring context
10935: Level: intermediate
10937: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
10938: `MatTransColoringApplyDenToSp()`
10939: @*/
10940: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
10941: {
10942: MatTransposeColoring c;
10943: MPI_Comm comm;
10945: PetscFunctionBegin;
10946: PetscAssertPointer(color, 3);
10948: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10949: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10950: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
10951: c->ctype = iscoloring->ctype;
10952: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
10953: *color = c;
10954: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10955: PetscFunctionReturn(PETSC_SUCCESS);
10956: }
10958: /*@
10959: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
10960: matrix has had new nonzero locations added (or removed) since the previous call, the value will be larger.
10962: Not Collective
10964: Input Parameter:
10965: . mat - the matrix
10967: Output Parameter:
10968: . state - the current state
10970: Level: intermediate
10972: Notes:
10973: You can only compare states from two different calls to the SAME matrix; you cannot compare calls between
10974: different matrices
10976: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
10978: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
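Example Usage:
A minimal sketch detecting a change in the nonzero structure of a hypothetical matrix `A` (error handling abbreviated):
.vb
  PetscObjectState before, after; // A: previously created and assembled matrix (assumed)

  PetscCall(MatGetNonzeroState(A, &before));
  // ... insert values into A and reassemble it ...
  PetscCall(MatGetNonzeroState(A, &after));
  if (after > before) {
    // new nonzero locations appeared; for example, redo a symbolic factorization
  }
.ve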
10980: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
10981: @*/
10982: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
10983: {
10984: PetscFunctionBegin;
10986: *state = mat->nonzerostate;
10987: PetscFunctionReturn(PETSC_SUCCESS);
10988: }
10990: /*@
10991: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
10992: matrices from each processor
10994: Collective
10996: Input Parameters:
10997: + comm - the communicator the parallel matrix will live on
10998: . seqmat - the input sequential matrices
10999: . n - number of local columns (or `PETSC_DECIDE`)
11000: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11002: Output Parameter:
11003: . mpimat - the parallel matrix generated
11005: Level: developer
11007: Note:
11008: The number of columns of the matrix in EACH processor MUST be the same.
11010: .seealso: [](ch_matrices), `Mat`
11011: @*/
11012: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11013: {
11014: PetscMPIInt size;
11016: PetscFunctionBegin;
11017: PetscCallMPI(MPI_Comm_size(comm, &size));
11018: if (size == 1) {
11019: if (reuse == MAT_INITIAL_MATRIX) {
11020: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11021: } else {
11022: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11023: }
11024: PetscFunctionReturn(PETSC_SUCCESS);
11025: }
11027: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
11029: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11030: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11031: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11032: PetscFunctionReturn(PETSC_SUCCESS);
11033: }
11035: /*@
11036: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
11038: Collective
11040: Input Parameters:
11041: + A - the matrix to create subdomains from
11042: - N - requested number of subdomains
11044: Output Parameters:
11045: + n - number of subdomains resulting on this MPI process
11046: - iss - `IS` list with indices of subdomains on this MPI process
11048: Level: advanced
11050: Note:
11051: The number of subdomains must be smaller than the communicator size
11053: .seealso: [](ch_matrices), `Mat`, `IS`
11054: @*/
11055: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11056: {
11057: MPI_Comm comm, subcomm;
11058: PetscMPIInt size, rank, color;
11059: PetscInt rstart, rend, k;
11061: PetscFunctionBegin;
11062: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11063: PetscCallMPI(MPI_Comm_size(comm, &size));
11064: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11065: PetscCheck(N >= 1 && N < size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11066: *n = 1;
11067:   k     = size / N + (size % N > 0); /* There are up to k ranks per color */
11068: color = rank / k;
11069: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11070: PetscCall(PetscMalloc1(1, iss));
11071: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11072: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11073: PetscCallMPI(MPI_Comm_free(&subcomm));
11074: PetscFunctionReturn(PETSC_SUCCESS);
11075: }
11077: /*@
11078: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11080: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11081: If they are not the same, uses `MatMatMatMult()`.
11083: Once the coarse grid problem is constructed, this routine corrects for interpolation operators
11084: that are not of full rank, which can legitimately happen in the case of non-nested
11085: geometric multigrid.
11087: Input Parameters:
11088: + restrct - restriction operator
11089: . dA - fine grid matrix
11090: . interpolate - interpolation operator
11091: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11092: - fill - expected fill, use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
11094: Output Parameter:
11095: . A - the Galerkin coarse matrix
11097: Options Database Key:
11098: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
11100: Level: developer
11102: Note:
11103: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
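Example Usage:
A minimal sketch (hypothetical restriction `R`, interpolation `P`, and fine grid matrix `Af`; passing the same matrix for both operators triggers the `MatPtAP()` path described above; error handling abbreviated):
.vb
  Mat Ac; // R, P, Af: previously created and assembled operators (assumed)

  PetscCall(MatGalerkin(R, Af, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Ac));
  // ... the values of Af change, but not its nonzero pattern ...
  PetscCall(MatGalerkin(R, Af, P, MAT_REUSE_MATRIX, PETSC_CURRENT, &Ac));
  PetscCall(MatDestroy(&Ac));
.ve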
11105: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11106: @*/
11107: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11108: {
11109: IS zerorows;
11110: Vec diag;
11112: PetscFunctionBegin;
11113: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
11114: /* Construct the coarse grid matrix */
11115: if (interpolate == restrct) {
11116: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11117: } else {
11118: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11119: }
11121: /* If the interpolation matrix is not of full rank, A will have zero rows.
11122: This can legitimately happen in the case of non-nested geometric multigrid.
11123: In that event, we set the rows of the matrix to the rows of the identity,
11124: ignoring the equations (as the RHS will also be zero). */
11126: PetscCall(MatFindZeroRows(*A, &zerorows));
11128: if (zerorows != NULL) { /* if there are any zero rows */
11129: PetscCall(MatCreateVecs(*A, &diag, NULL));
11130: PetscCall(MatGetDiagonal(*A, diag));
11131: PetscCall(VecISSet(diag, zerorows, 1.0));
11132: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11133: PetscCall(VecDestroy(&diag));
11134: PetscCall(ISDestroy(&zerorows));
11135: }
11136: PetscFunctionReturn(PETSC_SUCCESS);
11137: }
11139: /*@C
11140: MatSetOperation - Allows user to set a matrix operation for any matrix type
11142: Logically Collective
11144: Input Parameters:
11145: + mat - the matrix
11146: . op - the name of the operation
11147: - f - the function that provides the operation
11149: Level: developer
11151: Example Usage:
11152: .vb
11153: extern PetscErrorCode usermult(Mat, Vec, Vec);
11155: PetscCall(MatCreateXXX(comm, ..., &A));
11156: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFn *)usermult));
11157: .ve
11159: Notes:
11160: See the file `include/petscmat.h` for a complete list of matrix
11161: operations, which all have the form MATOP_<OPERATION>, where
11162: <OPERATION> is the name (in all capital letters) of the
11163: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11165: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11166: sequence as the usual matrix interface routines, since they
11167: are intended to be accessed via the usual matrix interface
11168: routines, e.g.,
11169: .vb
11170: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11171: .ve
11173: In particular each function MUST return `PETSC_SUCCESS` on success and
11174: nonzero on failure.
11176: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11178: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11179: @*/
11180: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, void (*f)(void))
11181: {
11182: PetscFunctionBegin;
11184: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))mat->ops->view) mat->ops->viewnative = mat->ops->view;
11185: (((void (**)(void))mat->ops)[op]) = f;
11186: PetscFunctionReturn(PETSC_SUCCESS);
11187: }
11189: /*@C
11190: MatGetOperation - Gets a matrix operation for any matrix type.
11192: Not Collective
11194: Input Parameters:
11195: + mat - the matrix
11196: - op - the name of the operation
11198: Output Parameter:
11199: . f - the function that provides the operation
11201: Level: developer
11203: Example Usage:
11204: .vb
11205: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11207: MatGetOperation(A, MATOP_MULT, (void (**)(void))&usermult);
11208: .ve
11210: Notes:
11211: See the file include/petscmat.h for a complete list of matrix
11212: operations, which all have the form MATOP_<OPERATION>, where
11213: <OPERATION> is the name (in all capital letters) of the
11214: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11216: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11218: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11219: @*/
11220: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, void (**f)(void))
11221: {
11222: PetscFunctionBegin;
11224: *f = (((void (**)(void))mat->ops)[op]);
11225: PetscFunctionReturn(PETSC_SUCCESS);
11226: }
11228: /*@
11229: MatHasOperation - Determines whether the given matrix supports the particular operation.
11231: Not Collective
11233: Input Parameters:
11234: + mat - the matrix
11235: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11237: Output Parameter:
11238: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11240: Level: advanced
11242: Note:
11243: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
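Example Usage:
A minimal sketch (hypothetical matrix `A` and previously created vector `d`; error handling abbreviated):
.vb
  PetscBool has; // A: matrix, d: compatible vector, both created elsewhere (assumed)

  PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL, &has));
  if (has) PetscCall(MatGetDiagonal(A, d));
.ve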
11245: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11246: @*/
11247: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11248: {
11249: PetscFunctionBegin;
11251: PetscAssertPointer(has, 3);
11252: if (mat->ops->hasoperation) {
11253: PetscUseTypeMethod(mat, hasoperation, op, has);
11254: } else {
11255: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11256: else {
11257: *has = PETSC_FALSE;
11258: if (op == MATOP_CREATE_SUBMATRIX) {
11259: PetscMPIInt size;
11261: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11262: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11263: }
11264: }
11265: }
11266: PetscFunctionReturn(PETSC_SUCCESS);
11267: }
11269: /*@
11270: MatHasCongruentLayouts - Determines whether the row and column layouts of the matrix are congruent
11272: Collective
11274: Input Parameter:
11275: . mat - the matrix
11277: Output Parameter:
11278: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11280: Level: beginner
11282: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11283: @*/
11284: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11285: {
11286: PetscFunctionBegin;
11289: PetscAssertPointer(cong, 2);
11290: if (!mat->rmap || !mat->cmap) {
11291: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11292: PetscFunctionReturn(PETSC_SUCCESS);
11293: }
11294: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11295: PetscCall(PetscLayoutSetUp(mat->rmap));
11296: PetscCall(PetscLayoutSetUp(mat->cmap));
11297: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11298: if (*cong) mat->congruentlayouts = 1;
11299: else mat->congruentlayouts = 0;
11300: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11301: PetscFunctionReturn(PETSC_SUCCESS);
11302: }
11304: PetscErrorCode MatSetInf(Mat A)
11305: {
11306: PetscFunctionBegin;
11307: PetscUseTypeMethod(A, setinf);
11308: PetscFunctionReturn(PETSC_SUCCESS);
11309: }
11311: /*@
11312: MatCreateGraph - create a scalar matrix (that is, a matrix with one vertex for each block vertex in the original matrix) for use in graph algorithms,
11313: possibly removing small values from the graph structure.
11315: Collective
11317: Input Parameters:
11318: + A - the matrix
11319: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11320: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11321: . filter - filter value - < 0: does nothing; == 0: removes only 0.0 entries; otherwise: removes entries with abs(entries) <= value
11322: . num_idx - size of 'index' array
11323: - index - array of block indices to use for graph strength of connection weight
11325: Output Parameter:
11326: . graph - the resulting graph
11328: Level: advanced
11330: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11331: @*/
11332: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
11333: {
11334: PetscFunctionBegin;
11338: PetscAssertPointer(graph, 7);
11339: PetscCall(PetscLogEventBegin(MAT_CreateGraph, A, 0, 0, 0));
11340: PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
11341: PetscCall(PetscLogEventEnd(MAT_CreateGraph, A, 0, 0, 0));
11342: PetscFunctionReturn(PETSC_SUCCESS);
11343: }
11345: /*@
11346: MatEliminateZeros - eliminate the nondiagonal zero entries from the nonzero structure of a sparse `Mat` in place,
11347: meaning the same memory is used for the matrix, and no new memory is allocated.
11349: Collective
11351: Input Parameters:
11352: + A - the matrix
11353: - keep - if the diagonal coefficient of a given row of `A` is zero, indicates whether it should be left in the structure or eliminated as well
11355: Level: intermediate
11357: Developer Note:
11358: The entries in the sparse matrix data structure are shifted to fill in the unneeded locations in the data. Thus the ends
11359: of the arrays in the data structure are unneeded.
11361: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11362: @*/
11363: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11364: {
11365: PetscFunctionBegin;
11367: PetscUseTypeMethod(A, eliminatezeros, keep);
11368: PetscFunctionReturn(PETSC_SUCCESS);
11369: }
11371: /*@C
11372: MatGetCurrentMemType - Get the memory location of the matrix
11374: Not Collective, but the result will be the same on all MPI processes
11376: Input Parameter:
11377: . A - the matrix whose memory type we are checking
11379: Output Parameter:
11380: . m - the memory type
11382: Level: intermediate
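Example Usage:
A minimal sketch (hypothetical matrix `A`; error handling abbreviated):
.vb
  PetscMemType mtype; // A: previously created matrix (assumed)

  PetscCall(MatGetCurrentMemType(A, &mtype));
  if (mtype == PETSC_MEMTYPE_HOST) {
    // the matrix data currently resides in host memory
  }
.ve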
11384: .seealso: [](ch_matrices), `Mat`, `MatBoundToCPU()`, `PetscMemType`
11385: @*/
11386: PetscErrorCode MatGetCurrentMemType(Mat A, PetscMemType *m)
11387: {
11388: PetscFunctionBegin;
11390: PetscAssertPointer(m, 2);
11391: if (A->ops->getcurrentmemtype) PetscUseTypeMethod(A, getcurrentmemtype, m);
11392: else *m = PETSC_MEMTYPE_HOST;
11393: PetscFunctionReturn(PETSC_SUCCESS);
11394: }