Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_CreateGraph;
40: PetscLogEvent MAT_SetValuesBatch;
41: PetscLogEvent MAT_ViennaCLCopyToGPU;
42: PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
43: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
44: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
45: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
46: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
47: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
49: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
51: /*@
52: MatSetRandom - Sets all components of a matrix to random numbers.
54: Logically Collective
56: Input Parameters:
57: + x - the matrix
58: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL`, in which case
59: one will be created internally.
61: Example:
62: .vb
63: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
64: MatSetRandom(x,rctx);
65: PetscRandomDestroy(&rctx);
66: .ve
68: Level: intermediate
70: Notes:
71: For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations;
73: for sparse matrices that already have nonzero locations, it fills those locations with random numbers.
75: It generates an error if used on unassembled sparse matrices that have not been preallocated.
77: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
78: @*/
79: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
80: {
81: PetscRandom randObj = NULL;
83: PetscFunctionBegin;
87: MatCheckPreallocated(x, 1);
89: if (!rctx) {
90: MPI_Comm comm;
91: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
92: PetscCall(PetscRandomCreate(comm, &randObj));
93: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
94: PetscCall(PetscRandomSetFromOptions(randObj));
95: rctx = randObj;
96: }
97: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
98: PetscUseTypeMethod(x, setrandom, rctx);
99: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
101: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
103: PetscCall(PetscRandomDestroy(&randObj));
104: PetscFunctionReturn(PETSC_SUCCESS);
105: }
107: /*@
108: MatCopyHashToXAIJ - copy hash table entries into an XAIJ matrix type
110: Logically Collective
112: Input Parameter:
113: . A - A matrix in unassembled, hash table form
115: Output Parameter:
116: . B - The XAIJ matrix. This can either be `A` or some matrix of equivalent size, e.g. obtained from `A` via `MatDuplicate()`
118: Example:
119: .vb
120: PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
121: PetscCall(MatCopyHashToXAIJ(A, B));
122: .ve
124: Level: advanced
126: Notes:
127: If `B` is `A`, then the hash table data structure will be destroyed. On return, `B` is assembled.
129: .seealso: [](ch_matrices), `Mat`, `MAT_USE_HASH_TABLE`
130: @*/
131: PetscErrorCode MatCopyHashToXAIJ(Mat A, Mat B)
132: {
133: PetscFunctionBegin;
135: PetscUseTypeMethod(A, copyhashtoxaij, B);
136: PetscFunctionReturn(PETSC_SUCCESS);
137: }
139: /*@
140: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
142: Logically Collective
144: Input Parameter:
145: . mat - the factored matrix
147: Output Parameters:
148: + pivot - the pivot value computed
149: - row - the row in which the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and to which processes
150: share the matrix
152: Level: advanced
154: Notes:
155: This routine does not work for factorizations done with external packages.
157: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
159: This can also be called on non-factored matrices, for example matrices used in SOR, which may also produce such errors.
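Example Usage:
A minimal sketch (assuming `F` is a matrix that has been numerically factored and `PetscCall()` error checking is used):
.vb
  MatFactorError err;
  PetscCall(MatFactorGetError(F, &err));
  if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
    PetscReal pivot;
    PetscInt  row;
    PetscCall(MatFactorGetErrorZeroPivot(F, &pivot, &row));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "zero pivot %g in row %" PetscInt_FMT "\n", (double)pivot, row));
  }
.ve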
161: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
162: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
163: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
164: @*/
165: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
166: {
167: PetscFunctionBegin;
169: PetscAssertPointer(pivot, 2);
170: PetscAssertPointer(row, 3);
171: *pivot = mat->factorerror_zeropivot_value;
172: *row = mat->factorerror_zeropivot_row;
173: PetscFunctionReturn(PETSC_SUCCESS);
174: }
176: /*@
177: MatFactorGetError - gets the error code from a factorization
179: Logically Collective
181: Input Parameter:
182: . mat - the factored matrix
184: Output Parameter:
185: . err - the error code
187: Level: advanced
189: Note:
190: This can also be called on non-factored matrices, for example matrices used in SOR, which may also produce such errors.
192: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
193: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
194: @*/
195: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
196: {
197: PetscFunctionBegin;
199: PetscAssertPointer(err, 2);
200: *err = mat->factorerrortype;
201: PetscFunctionReturn(PETSC_SUCCESS);
202: }
204: /*@
205: MatFactorClearError - clears the error code in a factorization
207: Logically Collective
209: Input Parameter:
210: . mat - the factored matrix
212: Level: developer
214: Note:
215: This can also be called on non-factored matrices, for example matrices used in SOR, which may also produce such errors.
217: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
218: `MatGetErrorCode()`, `MatFactorError`
219: @*/
220: PetscErrorCode MatFactorClearError(Mat mat)
221: {
222: PetscFunctionBegin;
224: mat->factorerrortype = MAT_FACTOR_NOERROR;
225: mat->factorerror_zeropivot_value = 0.0;
226: mat->factorerror_zeropivot_row = 0;
227: PetscFunctionReturn(PETSC_SUCCESS);
228: }
230: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
231: {
232: Vec r, l;
233: const PetscScalar *al;
234: PetscInt i, nz, gnz, N, n, st;
236: PetscFunctionBegin;
237: PetscCall(MatCreateVecs(mat, &r, &l));
238: if (!cols) { /* nonzero rows */
239: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
240: PetscCall(MatGetSize(mat, &N, NULL));
241: PetscCall(MatGetLocalSize(mat, &n, NULL));
242: PetscCall(VecSet(l, 0.0));
243: PetscCall(VecSetRandom(r, NULL));
244: PetscCall(MatMult(mat, r, l));
245: PetscCall(VecGetArrayRead(l, &al));
246: } else { /* nonzero columns */
247: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
248: PetscCall(MatGetSize(mat, NULL, &N));
249: PetscCall(MatGetLocalSize(mat, NULL, &n));
250: PetscCall(VecSet(r, 0.0));
251: PetscCall(VecSetRandom(l, NULL));
252: PetscCall(MatMultTranspose(mat, l, r));
253: PetscCall(VecGetArrayRead(r, &al));
254: }
255: if (tol <= 0.0) {
256: for (i = 0, nz = 0; i < n; i++)
257: if (al[i] != 0.0) nz++;
258: } else {
259: for (i = 0, nz = 0; i < n; i++)
260: if (PetscAbsScalar(al[i]) > tol) nz++;
261: }
262: PetscCallMPI(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
263: if (gnz != N) {
264: PetscInt *nzr;
265: PetscCall(PetscMalloc1(nz, &nzr));
266: if (nz) {
267: if (tol < 0) {
268: for (i = 0, nz = 0; i < n; i++)
269: if (al[i] != 0.0) nzr[nz++] = i + st;
270: } else {
271: for (i = 0, nz = 0; i < n; i++)
272: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
273: }
274: }
275: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
276: } else *nonzero = NULL;
277: if (!cols) { /* nonzero rows */
278: PetscCall(VecRestoreArrayRead(l, &al));
279: } else {
280: PetscCall(VecRestoreArrayRead(r, &al));
281: }
282: PetscCall(VecDestroy(&l));
283: PetscCall(VecDestroy(&r));
284: PetscFunctionReturn(PETSC_SUCCESS);
285: }
287: /*@
288: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
290: Input Parameter:
291: . mat - the matrix
293: Output Parameter:
294: . keptrows - the rows that are not completely zero
296: Level: intermediate
298: Note:
299: `keptrows` is set to `NULL` if all rows are nonzero.
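Example Usage:
A minimal sketch (assuming `mat` is an assembled matrix; the returned `IS`, when not `NULL`, is destroyed by the caller):
.vb
  IS keptrows;
  PetscCall(MatFindNonzeroRows(mat, &keptrows));
  if (keptrows) { // NULL when every row has a nonzero
    PetscCall(ISView(keptrows, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(ISDestroy(&keptrows));
  }
.ve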
301: Developer Note:
302: If `keptrows` is not `NULL`, it must be sorted.
304: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
305: @*/
306: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
307: {
308: PetscFunctionBegin;
311: PetscAssertPointer(keptrows, 2);
312: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
313: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
314: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
315: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
316: if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
317: PetscFunctionReturn(PETSC_SUCCESS);
318: }
320: /*@
321: MatFindZeroRows - Locate all rows that are completely zero in the matrix
323: Input Parameter:
324: . mat - the matrix
326: Output Parameter:
327: . zerorows - the rows that are completely zero
329: Level: intermediate
331: Note:
332: `zerorows` is set to `NULL` if no rows are zero.
334: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
335: @*/
336: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
337: {
338: IS keptrows;
339: PetscInt m, n;
341: PetscFunctionBegin;
344: PetscAssertPointer(zerorows, 2);
345: PetscCall(MatFindNonzeroRows(mat, &keptrows));
346: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
347: In keeping with this convention, we set zerorows to NULL if there are no zero
348: rows. */
349: if (keptrows == NULL) {
350: *zerorows = NULL;
351: } else {
352: PetscCall(MatGetOwnershipRange(mat, &m, &n));
353: PetscCall(ISComplement(keptrows, m, n, zerorows));
354: PetscCall(ISDestroy(&keptrows));
355: }
356: PetscFunctionReturn(PETSC_SUCCESS);
357: }
359: /*@
360: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
362: Not Collective
364: Input Parameter:
365: . A - the matrix
367: Output Parameter:
368: . a - the diagonal part (which is a SEQUENTIAL matrix)
370: Level: advanced
372: Notes:
373: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
375: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
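Example Usage:
An illustrative sketch (assuming `A` is an assembled parallel `MATMPIAIJ` matrix; the returned block is not destroyed by the caller):
.vb
  Mat Ad;
  PetscCall(MatGetDiagonalBlock(A, &Ad));
  PetscCall(MatView(Ad, PETSC_VIEWER_STDOUT_SELF)); // Ad is a sequential matrix on each process
.ve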
377: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
378: @*/
379: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
380: {
381: PetscFunctionBegin;
384: PetscAssertPointer(a, 2);
385: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
386: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
387: else {
388: PetscMPIInt size;
390: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
391: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
392: *a = A;
393: }
394: PetscFunctionReturn(PETSC_SUCCESS);
395: }
397: /*@
398: MatGetTrace - Gets the trace of a matrix, that is, the sum of its diagonal entries.
400: Collective
402: Input Parameter:
403: . mat - the matrix
405: Output Parameter:
406: . trace - the sum of the diagonal entries
408: Level: advanced
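Example Usage:
A minimal sketch (assuming `mat` is an assembled square matrix):
.vb
  PetscScalar trace;
  PetscCall(MatGetTrace(mat, &trace));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "trace = %g\n", (double)PetscRealPart(trace)));
.ve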
410: .seealso: [](ch_matrices), `Mat`
411: @*/
412: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
413: {
414: Vec diag;
416: PetscFunctionBegin;
418: PetscAssertPointer(trace, 2);
419: PetscCall(MatCreateVecs(mat, &diag, NULL));
420: PetscCall(MatGetDiagonal(mat, diag));
421: PetscCall(VecSum(diag, trace));
422: PetscCall(VecDestroy(&diag));
423: PetscFunctionReturn(PETSC_SUCCESS);
424: }
426: /*@
427: MatRealPart - Zeros out the imaginary part of the matrix
429: Logically Collective
431: Input Parameter:
432: . mat - the matrix
434: Level: advanced
436: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
437: @*/
438: PetscErrorCode MatRealPart(Mat mat)
439: {
440: PetscFunctionBegin;
443: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
444: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
445: MatCheckPreallocated(mat, 1);
446: PetscUseTypeMethod(mat, realpart);
447: PetscFunctionReturn(PETSC_SUCCESS);
448: }
450: /*@C
451: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
453: Collective
455: Input Parameter:
456: . mat - the matrix
458: Output Parameters:
459: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
460: - ghosts - the global indices of the ghost points
462: Level: advanced
464: Note:
465: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
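Example Usage:
An illustrative sketch passing the result to `VecCreateGhost()` (assuming `mat` is an assembled parallel `MATMPIAIJ` matrix):
.vb
  PetscInt        nghosts, n;
  const PetscInt *ghosts;
  Vec             v;
  PetscCall(MatGetGhosts(mat, &nghosts, &ghosts));
  PetscCall(MatGetLocalSize(mat, NULL, &n));
  PetscCall(VecCreateGhost(PetscObjectComm((PetscObject)mat), n, PETSC_DETERMINE, nghosts, ghosts, &v));
.ve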
467: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
468: @*/
469: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
470: {
471: PetscFunctionBegin;
474: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
475: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
476: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
477: else {
478: if (nghosts) *nghosts = 0;
479: if (ghosts) *ghosts = NULL;
480: }
481: PetscFunctionReturn(PETSC_SUCCESS);
482: }
484: /*@
485: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
487: Logically Collective
489: Input Parameter:
490: . mat - the matrix
492: Level: advanced
494: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
495: @*/
496: PetscErrorCode MatImaginaryPart(Mat mat)
497: {
498: PetscFunctionBegin;
501: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
502: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
503: MatCheckPreallocated(mat, 1);
504: PetscUseTypeMethod(mat, imaginarypart);
505: PetscFunctionReturn(PETSC_SUCCESS);
506: }
508: /*@
509: MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure
511: Not Collective
513: Input Parameter:
514: . mat - the matrix
516: Output Parameters:
517: + missing - is any diagonal entry missing
518: - dd - the first diagonal entry on this process that is missing (optional)
520: Level: advanced
522: Note:
523: This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value
525: .seealso: [](ch_matrices), `Mat`
526: @*/
527: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
528: {
529: PetscFunctionBegin;
532: PetscAssertPointer(missing, 2);
533: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
534: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
535: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
536: PetscFunctionReturn(PETSC_SUCCESS);
537: }
539: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
540: /*@C
541: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
542: for each row that you get to ensure that your application does
543: not leak memory.
545: Not Collective
547: Input Parameters:
548: + mat - the matrix
549: - row - the row to get
551: Output Parameters:
552: + ncols - if not `NULL`, the number of nonzeros in `row`
553: . cols - if not `NULL`, the column numbers
554: - vals - if not `NULL`, the numerical values
556: Level: advanced
558: Notes:
559: This routine is provided for people who need to have direct access
560: to the structure of a matrix. We hope that we provide enough
561: high-level matrix routines that few users will need it.
563: `MatGetRow()` always returns 0-based column indices, regardless of
564: whether the internal representation is 0-based (default) or 1-based.
566: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
567: not wish to extract these quantities.
569: The user can only examine the values extracted with `MatGetRow()`;
570: the values CANNOT be altered. To change the matrix entries, one
571: must use `MatSetValues()`.
573: You can only have one call to `MatGetRow()` outstanding for a particular
574: matrix at a time, per processor. `MatGetRow()` can only obtain rows
575: associated with the given processor; it cannot get rows from the
576: other processors; for that we suggest using `MatCreateSubMatrices()`, then
577: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
578: is in the global numbering of rows.
580: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
582: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
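Example Usage:
A minimal sketch looping over the locally owned rows of an assembled matrix `mat`:
.vb
  PetscInt           rstart, rend, ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;
  PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
  for (PetscInt row = rstart; row < rend; row++) {
    PetscCall(MatGetRow(mat, row, &ncols, &cols, &vals));
    // examine cols[] and vals[] here; the values may not be modified
    PetscCall(MatRestoreRow(mat, row, &ncols, &cols, &vals));
  }
.ve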
584: Fortran Note:
585: .vb
586: PetscInt, pointer :: cols(:)
587: PetscScalar, pointer :: vals(:)
588: .ve
590: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
591: @*/
592: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
593: {
594: PetscInt incols;
596: PetscFunctionBegin;
599: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
600: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
601: MatCheckPreallocated(mat, 1);
602: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
603: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
604: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
605: if (ncols) *ncols = incols;
606: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
607: PetscFunctionReturn(PETSC_SUCCESS);
608: }
610: /*@
611: MatConjugate - replaces the matrix values with their complex conjugates
613: Logically Collective
615: Input Parameter:
616: . mat - the matrix
618: Level: advanced
620: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
621: @*/
622: PetscErrorCode MatConjugate(Mat mat)
623: {
624: PetscFunctionBegin;
626: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
627: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
628: PetscUseTypeMethod(mat, conjugate);
629: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
630: }
631: PetscFunctionReturn(PETSC_SUCCESS);
632: }
634: /*@C
635: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
637: Not Collective
639: Input Parameters:
640: + mat - the matrix
641: . row - the row to get
642: . ncols - the number of nonzeros
643: . cols - the columns of the nonzeros
644: - vals - if not `NULL`, the values of the nonzeros
646: Level: advanced
648: Notes:
649: This routine should be called after you have finished examining the entries.
651: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
652: use of the array after it has been restored. If you pass `NULL`, it will
653: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
655: Fortran Note:
656: .vb
657: PetscInt, pointer :: cols(:)
658: PetscScalar, pointer :: vals(:)
659: .ve
661: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
662: @*/
663: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
664: {
665: PetscFunctionBegin;
667: if (ncols) PetscAssertPointer(ncols, 3);
668: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
669: PetscTryTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
670: if (ncols) *ncols = 0;
671: if (cols) *cols = NULL;
672: if (vals) *vals = NULL;
673: PetscFunctionReturn(PETSC_SUCCESS);
674: }
676: /*@
677: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
678: You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.
680: Not Collective
682: Input Parameter:
683: . mat - the matrix
685: Level: advanced
687: Note:
688: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.
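Example Usage:
An illustrative sketch (assuming `mat` is an assembled `MATSBAIJ` matrix and `row` is a locally owned global row index):
.vb
  PetscInt           ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;
  PetscCall(MatGetRowUpperTriangular(mat));
  PetscCall(MatGetRow(mat, row, &ncols, &cols, &vals)); // returns only the upper triangular part of the row
  PetscCall(MatRestoreRow(mat, row, &ncols, &cols, &vals));
  PetscCall(MatRestoreRowUpperTriangular(mat));
.ve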
690: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
691: @*/
692: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
693: {
694: PetscFunctionBegin;
697: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
698: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
699: MatCheckPreallocated(mat, 1);
700: PetscTryTypeMethod(mat, getrowuppertriangular);
701: PetscFunctionReturn(PETSC_SUCCESS);
702: }
704: /*@
705: MatRestoreRowUpperTriangular - Disables calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
707: Not Collective
709: Input Parameter:
710: . mat - the matrix
712: Level: advanced
714: Note:
715: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
717: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
718: @*/
719: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
720: {
721: PetscFunctionBegin;
724: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
725: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
726: MatCheckPreallocated(mat, 1);
727: PetscTryTypeMethod(mat, restorerowuppertriangular);
728: PetscFunctionReturn(PETSC_SUCCESS);
729: }
731: /*@
732: MatSetOptionsPrefix - Sets the prefix used for searching for all
733: `Mat` options in the database.
735: Logically Collective
737: Input Parameters:
738: + A - the matrix
739: - prefix - the prefix to prepend to all option names
741: Level: advanced
743: Notes:
744: A hyphen (-) must NOT be given at the beginning of the prefix name.
745: The first character of all runtime options is AUTOMATICALLY the hyphen.
747: This is NOT used for options for the factorization of the matrix. Normally the
748: prefix is automatically passed in from the PC calling the factorization. To set
749: it directly use `MatSetOptionsPrefixFactor()`
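Example Usage:
A minimal sketch (the prefix "sys1_" is illustrative; the matrix type could then be selected with -sys1_mat_type on the command line):
.vb
  PetscCall(MatSetOptionsPrefix(A, "sys1_"));
  PetscCall(MatSetFromOptions(A));
.ve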
751: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
752: @*/
753: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
754: {
755: PetscFunctionBegin;
757: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
758: PetscFunctionReturn(PETSC_SUCCESS);
759: }
761: /*@
762: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database
763: for matrices created with `MatGetFactor()`
765: Logically Collective
767: Input Parameters:
768: + A - the matrix
769: - prefix - the prefix to prepend to all option names for the factored matrix
771: Level: developer
773: Notes:
774: A hyphen (-) must NOT be given at the beginning of the prefix name.
775: The first character of all runtime options is AUTOMATICALLY the hyphen.
777: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
778: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
780: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
781: @*/
782: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
783: {
784: PetscFunctionBegin;
786: if (prefix) {
787: PetscAssertPointer(prefix, 2);
788: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
789: if (prefix != A->factorprefix) {
790: PetscCall(PetscFree(A->factorprefix));
791: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
792: }
793: } else PetscCall(PetscFree(A->factorprefix));
794: PetscFunctionReturn(PETSC_SUCCESS);
795: }
797: /*@
798: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database
799: for matrices created with `MatGetFactor()`
801: Logically Collective
803: Input Parameters:
804: + A - the matrix
805: - prefix - the prefix to prepend to all option names for the factored matrix
807: Level: developer
809: Notes:
810: A hyphen (-) must NOT be given at the beginning of the prefix name.
811: The first character of all runtime options is AUTOMATICALLY the hyphen.
813: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
814: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
816: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
817: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
818: `MatSetOptionsPrefix()`
819: @*/
820: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
821: {
822: size_t len1, len2, new_len;
824: PetscFunctionBegin;
826: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
827: if (!A->factorprefix) {
828: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
829: PetscFunctionReturn(PETSC_SUCCESS);
830: }
831: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
833: PetscCall(PetscStrlen(A->factorprefix, &len1));
834: PetscCall(PetscStrlen(prefix, &len2));
835: new_len = len1 + len2 + 1;
836: PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
837: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
838: PetscFunctionReturn(PETSC_SUCCESS);
839: }
841: /*@
842: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
843: matrix options in the database.
845: Logically Collective
847: Input Parameters:
848: + A - the matrix
849: - prefix - the prefix to prepend to all option names
851: Level: advanced
853: Note:
854: A hyphen (-) must NOT be given at the beginning of the prefix name.
855: The first character of all runtime options is AUTOMATICALLY the hyphen.
857: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
858: @*/
859: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
860: {
861: PetscFunctionBegin;
863: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
864: PetscFunctionReturn(PETSC_SUCCESS);
865: }
867: /*@
868: MatGetOptionsPrefix - Gets the prefix used for searching for all
869: matrix options in the database.
871: Not Collective
873: Input Parameter:
874: . A - the matrix
876: Output Parameter:
877: . prefix - pointer to the prefix string used
879: Level: advanced
881: Fortran Note:
882: The user should pass in a string `prefix` of
883: sufficient length to hold the prefix.
885: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
886: @*/
887: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
888: {
889: PetscFunctionBegin;
891: PetscAssertPointer(prefix, 2);
892: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
893: PetscFunctionReturn(PETSC_SUCCESS);
894: }
896: /*@
897: MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`
899: Not Collective
901: Input Parameter:
902: . A - the matrix
904: Output Parameter:
905: . state - the object state
907: Level: advanced
909: Note:
910: Object state is an integer which gets increased every time
911: the object is changed. By saving and later querying the object state
912: one can determine whether information about the object is still current.
914: See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.
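Example Usage:
An illustrative sketch (assuming `savedstate` is a `PetscObjectState` cached by the caller from an earlier call):
.vb
  PetscObjectState state;
  PetscCall(MatGetState(A, &state));
  if (state != savedstate) {
    // the matrix has changed since the state was saved; recompute anything that depends on it
    savedstate = state;
  }
.ve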
916: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
917: @*/
918: PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
919: {
920: PetscFunctionBegin;
922: PetscAssertPointer(state, 2);
923: PetscCall(PetscObjectStateGet((PetscObject)A, state));
924: PetscFunctionReturn(PETSC_SUCCESS);
925: }
927: /*@
928: MatResetPreallocation - Reset matrix to use the original preallocation values provided by the user, for example with `MatXAIJSetPreallocation()`
930: Collective
932: Input Parameter:
933: . A - the matrix
935: Level: beginner
937: Notes:
938: After calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`, the matrix data structures represent the nonzeros assigned to the
939: matrix. If that space is less than the preallocated space, the extra preallocated space is no longer available to take on new values. `MatResetPreallocation()`
940: makes all of the preallocated space available again.
942: Current values in the matrix are lost in this call.
944: Currently only supported for `MATAIJ` matrices.
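Example Usage:
A minimal sketch (assuming `A` is a previously assembled `MATAIJ` matrix and `i`, `j`, `v` are values to insert):
.vb
  PetscCall(MatResetPreallocation(A));
  PetscCall(MatSetValues(A, 1, &i, 1, &j, &v, INSERT_VALUES)); // the full preallocated space is available again
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve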
946: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
947: @*/
948: PetscErrorCode MatResetPreallocation(Mat A)
949: {
950: PetscFunctionBegin;
953: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
954: PetscFunctionReturn(PETSC_SUCCESS);
955: }
957: /*@
958: MatResetHash - Reset the matrix so that it will use a hash table for the next round of `MatSetValues()` and `MatAssemblyBegin()`/`MatAssemblyEnd()`.
960: Collective
962: Input Parameter:
963: . A - the matrix
965: Level: intermediate
967: Notes:
968: The matrix will again delete the hash table data structures after subsequent calls to `MatAssemblyBegin()`/`MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
970: Currently only supported for `MATAIJ` matrices.
972: .seealso: [](ch_matrices), `Mat`, `MatResetPreallocation()`
973: @*/
974: PetscErrorCode MatResetHash(Mat A)
975: {
976: PetscFunctionBegin;
979: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset to hash state after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
980: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
981: PetscUseMethod(A, "MatResetHash_C", (Mat), (A));
982: /* These flags are used to determine whether certain setups occur */
983: A->was_assembled = PETSC_FALSE;
984: A->assembled = PETSC_FALSE;
985: /* Log that the state of this object has changed; this will help guarantee that preconditioners get re-setup */
986: PetscCall(PetscObjectStateIncrease((PetscObject)A));
987: PetscFunctionReturn(PETSC_SUCCESS);
988: }
990: /*@
991: MatSetUp - Sets up the internal matrix data structures for later use by the matrix
993: Collective
995: Input Parameter:
996: . A - the matrix
998: Level: advanced
1000: Notes:
1001: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
1002: setting values in the matrix.
1004: This routine is called internally by other `Mat` functions when needed, so it rarely needs to be called by users.
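Example Usage:
A minimal sketch creating a matrix without explicit preallocation (assuming `m` and `n` are the global sizes):
.vb
  Mat A;
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m, n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A)); // MatSetValues() may now be called
.ve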
1006: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
1007: @*/
1008: PetscErrorCode MatSetUp(Mat A)
1009: {
1010: PetscFunctionBegin;
1012: if (!((PetscObject)A)->type_name) {
1013: PetscMPIInt size;
1015: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1016: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
1017: }
1018: if (!A->preallocated) PetscTryTypeMethod(A, setup);
1019: PetscCall(PetscLayoutSetUp(A->rmap));
1020: PetscCall(PetscLayoutSetUp(A->cmap));
1021: A->preallocated = PETSC_TRUE;
1022: PetscFunctionReturn(PETSC_SUCCESS);
1023: }
1025: #if defined(PETSC_HAVE_SAWS)
1026: #include <petscviewersaws.h>
1027: #endif
1029: /*
1030: If thread safety is on, extraneous matrices may be printed
1032: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix, which is passed into MatViewFromOptions()
1033: */
1034: #if !defined(PETSC_HAVE_THREADSAFETY)
1035: static PetscInt insidematview = 0;
1036: #endif
1038: /*@
1039: MatViewFromOptions - View properties of the matrix based on options set in the options database
1041: Collective
1043: Input Parameters:
1044: + A - the matrix
1045: . obj - optional additional object that provides the options prefix to use
1046: - name - command line option
1048: Options Database Key:
1049: . -mat_view [viewertype]:... - the viewer and its options
1051: Level: intermediate
1053: Note:
1054: .vb
1055: If no value is provided ascii:stdout is used
1056: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
1057: for example ascii::ascii_info prints just the information about the object not all details
1058: unless :append is given filename opens in write mode, overwriting what was already there
1059: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
1060: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
1061: socket[:port] defaults to the standard output port
1062: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
1063: .ve
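Example Usage:
A minimal sketch (the option name "-my_mat_view" is illustrative):
.vb
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatViewFromOptions(A, NULL, "-my_mat_view")); // views A only if -my_mat_view is in the options database
.ve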
1065: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1066: @*/
1067: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
1068: {
1069: PetscFunctionBegin;
1071: #if !defined(PETSC_HAVE_THREADSAFETY)
1072: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
1073: #endif
1074: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
1075: PetscFunctionReturn(PETSC_SUCCESS);
1076: }
1078: /*@
1079: MatView - display information about a matrix in a variety of ways
1081: Collective on viewer
1083: Input Parameters:
1084: + mat - the matrix
1085: - viewer - visualization context
1087: Options Database Keys:
1088: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1089: . -mat_view ::ascii_info_detail - Prints more detailed info
1090: . -mat_view - Prints matrix in ASCII format
1091: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1092: . -mat_view draw - Draws the nonzero structure of the matrix, using `MatView()` and `PetscDrawOpenX()`.
1093: . -display <name> - Sets display name (default is host)
1094: . -draw_pause <sec> - Sets number of seconds to pause after display
1095: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1096: . -viewer_socket_machine <machine> - -
1097: . -viewer_socket_port <port> - -
1098: . -mat_view binary - save matrix to file in binary format
1099: - -viewer_binary_filename <name> - -
1101: Level: beginner
1103: Notes:
1104: The available visualization contexts include
1105: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1106: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1107: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1108: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1110: The user can open alternative visualization contexts with
1111: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1112: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a specified file; corresponding input uses `MatLoad()`
1113: . `PetscViewerDrawOpen()` - Outputs the nonzero structure of the matrix to an X window display
1114: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer, `PETSCVIEWERSOCKET`. Only the `MATSEQDENSE` and `MATAIJ` types support this viewer.
1116: The user can call `PetscViewerPushFormat()` to specify the output
1117: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1118: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1119: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1120: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1121: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1122: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse format common among all matrix types
1123: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific format (which is in many cases the same as the default)
1124: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix size and structure (not the matrix entries)
1125: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about the matrix nonzero structure (still not vector or matrix entries)
1127: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1128: for larger matrices the program will seemingly hang and take hours, so one should use the binary format instead.
1130: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1132: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1133: viewer is used.
1135: See share/petsc/matlab/PetscBinaryRead.m for MATLAB code that can read in the binary file when the binary
1136: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1138: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1139: and then use the following mouse functions.
1140: .vb
1141: left mouse: zoom in
1142: middle mouse: zoom out
1143: right mouse: continue with the simulation
1144: .ve
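Example Usage:
A minimal sketch printing basic information about an assembled matrix `mat` to standard output:
.vb
  PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO));
  PetscCall(MatView(mat, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
.ve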
1146: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1147: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1148: @*/
1149: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1150: {
1151: PetscInt rows, cols, rbs, cbs;
1152: PetscBool isascii, isstring, issaws;
1153: PetscViewerFormat format;
1154: PetscMPIInt size;
1156: PetscFunctionBegin;
1159: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1162: PetscCall(PetscViewerGetFormat(viewer, &format));
1163: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1164: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1166: #if !defined(PETSC_HAVE_THREADSAFETY)
1167: insidematview++;
1168: #endif
1169: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1170: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1171: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1172: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1174: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1175: if (isascii) {
1176: if (!mat->preallocated) {
1177: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1178: #if !defined(PETSC_HAVE_THREADSAFETY)
1179: insidematview--;
1180: #endif
1181: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1182: PetscFunctionReturn(PETSC_SUCCESS);
1183: }
1184: if (!mat->assembled) {
1185: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1186: #if !defined(PETSC_HAVE_THREADSAFETY)
1187: insidematview--;
1188: #endif
1189: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1190: PetscFunctionReturn(PETSC_SUCCESS);
1191: }
1192: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1193: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1194: MatNullSpace nullsp, transnullsp;
1196: PetscCall(PetscViewerASCIIPushTab(viewer));
1197: PetscCall(MatGetSize(mat, &rows, &cols));
1198: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1199: if (rbs != 1 || cbs != 1) {
1200: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1201: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1202: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1203: if (mat->factortype) {
1204: MatSolverType solver;
1205: PetscCall(MatFactorGetSolverType(mat, &solver));
1206: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1207: }
1208: if (mat->ops->getinfo) {
1209: MatInfo info;
1210: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1211: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1212: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1213: }
1214: PetscCall(MatGetNullSpace(mat, &nullsp));
1215: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1216: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1217: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1218: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1219: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1220: PetscCall(PetscViewerASCIIPushTab(viewer));
1221: PetscCall(MatProductView(mat, viewer));
1222: PetscCall(PetscViewerASCIIPopTab(viewer));
1223: if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1224: IS tmp;
1226: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
1227: PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1228: PetscCall(PetscViewerASCIIPushTab(viewer));
1229: PetscCall(ISView(tmp, viewer));
1230: PetscCall(PetscViewerASCIIPopTab(viewer));
1231: PetscCall(ISDestroy(&tmp));
1232: }
1233: }
1234: } else if (issaws) {
1235: #if defined(PETSC_HAVE_SAWS)
1236: PetscMPIInt rank;
1238: PetscCall(PetscObjectName((PetscObject)mat));
1239: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1240: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1241: #endif
1242: } else if (isstring) {
1243: const char *type;
1244: PetscCall(MatGetType(mat, &type));
1245: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1246: PetscTryTypeMethod(mat, view, viewer);
1247: }
1248: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1249: PetscCall(PetscViewerASCIIPushTab(viewer));
1250: PetscUseTypeMethod(mat, viewnative, viewer);
1251: PetscCall(PetscViewerASCIIPopTab(viewer));
1252: } else if (mat->ops->view) {
1253: PetscCall(PetscViewerASCIIPushTab(viewer));
1254: PetscUseTypeMethod(mat, view, viewer);
1255: PetscCall(PetscViewerASCIIPopTab(viewer));
1256: }
1257: if (isascii) {
1258: PetscCall(PetscViewerGetFormat(viewer, &format));
1259: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1260: }
1261: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1262: #if !defined(PETSC_HAVE_THREADSAFETY)
1263: insidematview--;
1264: #endif
1265: PetscFunctionReturn(PETSC_SUCCESS);
1266: }
1268: #if defined(PETSC_USE_DEBUG)
1269: #include <../src/sys/totalview/tv_data_display.h>
1270: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1271: {
1272: TV_add_row("Local rows", "int", &mat->rmap->n);
1273: TV_add_row("Local columns", "int", &mat->cmap->n);
1274: TV_add_row("Global rows", "int", &mat->rmap->N);
1275: TV_add_row("Global columns", "int", &mat->cmap->N);
1276: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1277: return TV_format_OK;
1278: }
1279: #endif
1281: /*@
1282: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1283: with `MatView()`. The matrix format is determined from the options database.
1284: Generates a parallel MPI matrix if the communicator has more than one
1285: processor. The default matrix type is `MATAIJ`.
1287: Collective
1289: Input Parameters:
1290: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1291: or some related function before a call to `MatLoad()`
1292: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1294: Options Database Key:
1295: . -matload_block_size <bs> - set block size
1297: Level: beginner
1299: Notes:
1300: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1301: `Mat` before calling this routine if you wish to set it from the options database.
1303: `MatLoad()` automatically loads into the options database any options
1304: given in the file filename.info where filename is the name of the file
1305: that was passed to `PetscViewerBinaryOpen()`. The options in the info
1306: file will be ignored if you use the -viewer_binary_skip_info option.
1308: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1309: sets the default matrix type AIJ and sets the local and global sizes.
1310: If the type and/or size is already set, then those values are used.
1312: In parallel, each processor can load a subset of rows (or the
1313: entire matrix). This routine is especially useful when a large
1314: matrix is stored on disk and only part of it is desired on each
1315: processor. For example, a parallel solver may access only some of
1316: the rows from each processor. The algorithm used here reads
1317: relatively small blocks of data rather than reading the entire
1318: matrix and then subsetting it.
1320: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1321: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1322: or the sequence like
1323: .vb
1324: `PetscViewer` v;
1325: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1326: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1327: `PetscViewerSetFromOptions`(v);
1328: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1329: `PetscViewerFileSetName`(v,"datafile");
1330: .ve
1331: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1332: $ -viewer_type {binary, hdf5}
1334: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1335: and src/mat/tutorials/ex10.c with the second approach.
1337: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1338: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1339: Multiple objects, both matrices and vectors, can be stored within the same file.
1340: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1342: Most users should not need to know the details of the binary storage
1343: format, since `MatLoad()` and `MatView()` completely hide these details.
1344: But for anyone who is interested, the standard binary matrix storage
1345: format is
1347: .vb
1348: PetscInt MAT_FILE_CLASSID
1349: PetscInt number of rows
1350: PetscInt number of columns
1351: PetscInt total number of nonzeros
1352: PetscInt *number nonzeros in each row
1353: PetscInt *column indices of all nonzeros (starting index is zero)
1354: PetscScalar *values of all nonzeros
1355: .ve
1356: If PETSc was not configured with `--with-64-bit-indices`, then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` nonzeros can be
1357: stored or loaded (each MPI process's part of the matrix must have fewer than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1358: case will not fit in a (32-bit) `PetscInt`, the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1360: PETSc automatically does the byte swapping for
1361: machines that store the bytes reversed. Thus if you write your own binary
1362: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1363: and `PetscBinaryWrite()` to see how this may be done.
1365: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1366: Each processor's chunk is loaded independently by its owning MPI process.
1367: Multiple objects, both matrices and vectors, can be stored within the same file.
1368: They are looked up by their PetscObject name.
1370: As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1371: by default the same structure and naming of the AIJ arrays and column count
1372: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1373: $ save example.mat A b -v7.3
1374: can be directly read by this routine (see Reference 1 for details).
1376: Depending on your MATLAB version, this format might be a default,
1377: otherwise you can set it as default in Preferences.
1379: Unless the -nocompression flag is used to save the file in MATLAB,
1380: PETSc must be configured with the ZLIB package.
1382: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1384: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1386: Corresponding `MatView()` is not yet implemented.
1388: The loaded matrix is actually a transpose of the original one in MATLAB,
1389: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1390: With this format, the matrix is automatically transposed by PETSc,
1391: unless the matrix is marked as SPD or symmetric
1392: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1394: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1396: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1397: @*/
1398: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1399: {
1400: PetscBool flg;
1402: PetscFunctionBegin;
1406: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1408: flg = PETSC_FALSE;
1409: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1410: if (flg) {
1411: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1412: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1413: }
1414: flg = PETSC_FALSE;
1415: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1416: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1418: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1419: PetscUseTypeMethod(mat, load, viewer);
1420: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1421: PetscFunctionReturn(PETSC_SUCCESS);
1422: }
1424: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1425: {
1426: Mat_Redundant *redund = *redundant;
1428: PetscFunctionBegin;
1429: if (redund) {
1430: if (redund->matseq) { /* via MatCreateSubMatrices() */
1431: PetscCall(ISDestroy(&redund->isrow));
1432: PetscCall(ISDestroy(&redund->iscol));
1433: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1434: } else {
1435: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1436: PetscCall(PetscFree(redund->sbuf_j));
1437: PetscCall(PetscFree(redund->sbuf_a));
1438: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1439: PetscCall(PetscFree(redund->rbuf_j[i]));
1440: PetscCall(PetscFree(redund->rbuf_a[i]));
1441: }
1442: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1443: }
1445: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1446: PetscCall(PetscFree(redund));
1447: }
1448: PetscFunctionReturn(PETSC_SUCCESS);
1449: }
1451: /*@
1452: MatDestroy - Frees space taken by a matrix.
1454: Collective
1456: Input Parameter:
1457: . A - the matrix
1459: Level: beginner
1461: Developer Note:
1462: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1463: `MatDestroySubMatrices()`. Thus any changes made here must also be made in those routines.
1464: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need
1465: corresponding changes when this routine is changed.
1467: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1468: @*/
1469: PetscErrorCode MatDestroy(Mat *A)
1470: {
1471: PetscFunctionBegin;
1472: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1474: if (--((PetscObject)*A)->refct > 0) {
1475: *A = NULL;
1476: PetscFunctionReturn(PETSC_SUCCESS);
1477: }
1479: /* if memory was published with SAWs then destroy it */
1480: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1481: PetscTryTypeMethod(*A, destroy);
1483: PetscCall(PetscFree((*A)->factorprefix));
1484: PetscCall(PetscFree((*A)->defaultvectype));
1485: PetscCall(PetscFree((*A)->defaultrandtype));
1486: PetscCall(PetscFree((*A)->bsizes));
1487: PetscCall(PetscFree((*A)->solvertype));
1488: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1489: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1490: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1491: PetscCall(MatProductClear(*A));
1492: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1493: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1494: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1495: PetscCall(MatDestroy(&(*A)->schur));
1496: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1497: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1498: PetscCall(PetscHeaderDestroy(A));
1499: PetscFunctionReturn(PETSC_SUCCESS);
1500: }
1502: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1503: /*@
1504: MatSetValues - Inserts or adds a block of values into a matrix.
1505: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1506: MUST be called after all calls to `MatSetValues()` have been completed.
1508: Not Collective
1510: Input Parameters:
1511: + mat - the matrix
1512: . m - the number of rows
1513: . idxm - the global indices of the rows
1514: . n - the number of columns
1515: . idxn - the global indices of the columns
1516: . v - a logically two-dimensional array of values
1517: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1519: Level: beginner
1521: Notes:
1522: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1524: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1525: options cannot be mixed without intervening calls to the assembly
1526: routines.
1528: `MatSetValues()` uses 0-based row and column numbers in Fortran
1529: as well as in C.
1531: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1532: simply ignored. This allows easily inserting element stiffness matrices
1533: with homogeneous Dirichlet boundary conditions that you don't want represented
1534: in the matrix.
1536: Efficiency Alert:
1537: The routine `MatSetValuesBlocked()` may offer much better efficiency
1538: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1540: Fortran Notes:
1541: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1542: .vb
1543: call MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
1544: .ve
1546: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1548: Developer Note:
1549: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1550: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
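Example Usage:
A minimal sketch assembling the locally owned rows of a tridiagonal matrix one row at a time; it assumes `mat` has already been created, sized, and preallocated, and the stencil values are illustrative
.vb
PetscInt rstart, rend, N;
PetscCall(MatGetSize(mat, &N, NULL));
PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
for (PetscInt i = rstart; i < rend; i++) {
  PetscInt    ncols = 0, cols[3];
  PetscScalar vals[3];
  if (i > 0)     { cols[ncols] = i - 1; vals[ncols] = -1.0; ncols++; }
  cols[ncols] = i; vals[ncols] = 2.0; ncols++;
  if (i < N - 1) { cols[ncols] = i + 1; vals[ncols] = -1.0; ncols++; }
  PetscCall(MatSetValues(mat, 1, &i, ncols, cols, vals, INSERT_VALUES));
}
PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve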
1552: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1553: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1554: @*/
1555: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1556: {
1557: PetscFunctionBeginHot;
1560: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1561: PetscAssertPointer(idxm, 3);
1562: PetscAssertPointer(idxn, 5);
1563: MatCheckPreallocated(mat, 1);
1565: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1566: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1568: if (PetscDefined(USE_DEBUG)) {
1569: PetscInt i, j;
1571: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1572: if (v) {
1573: for (i = 0; i < m; i++) {
1574: for (j = 0; j < n; j++) {
1575: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1576: #if defined(PETSC_USE_COMPLEX)
1577: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1578: #else
1579: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1580: #endif
1581: }
1582: }
1583: }
1584: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1585: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1586: }
1588: if (mat->assembled) {
1589: mat->was_assembled = PETSC_TRUE;
1590: mat->assembled = PETSC_FALSE;
1591: }
1592: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1593: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1594: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1595: PetscFunctionReturn(PETSC_SUCCESS);
1596: }
1598: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1599: /*@
1600: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns
1601: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1602: MUST be called after all calls to `MatSetValues()` have been completed.
1604: Not Collective
1606: Input Parameters:
1607: + mat - the matrix
1608: . ism - the rows to provide
1609: . isn - the columns to provide
1610: . v - a logically two-dimensional array of values
1611: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1613: Level: beginner
1615: Notes:
1616: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1618: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1619: options cannot be mixed without intervening calls to the assembly
1620: routines.
1622: `MatSetValues()` uses 0-based row and column numbers in Fortran
1623: as well as in C.
1625: Negative indices may be passed in `ism` and `isn`; these rows and columns are
1626: simply ignored. This allows easily inserting element stiffness matrices
1627: with homogeneous Dirichlet boundary conditions that you don't want represented
1628: in the matrix.
1630: Efficiency Alert:
1631: The routine `MatSetValuesBlocked()` may offer much better efficiency
1632: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1634: This is currently not optimized for any particular `ISType`
1636: Developer Note:
1637: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1638: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
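Example Usage:
A minimal sketch inserting a 2 by 2 dense block whose rows and columns are described by index sets; the indices and values are illustrative and `mat` is assumed to be preallocated
.vb
IS          ism, isn;
PetscInt    rows[2] = {0, 3}, cols[2] = {1, 2};
PetscScalar v[4]    = {1.0, 2.0, 3.0, 4.0}; // row-oriented: entries (0,1) (0,2) (3,1) (3,2)
PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, rows, PETSC_COPY_VALUES, &ism));
PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, cols, PETSC_COPY_VALUES, &isn));
PetscCall(MatSetValuesIS(mat, ism, isn, v, INSERT_VALUES));
PetscCall(ISDestroy(&ism));
PetscCall(ISDestroy(&isn));
.ve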
1640: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1641: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1642: @*/
1643: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1644: {
1645: PetscInt m, n;
1646: const PetscInt *rows, *cols;
1648: PetscFunctionBeginHot;
1650: PetscCall(ISGetIndices(ism, &rows));
1651: PetscCall(ISGetIndices(isn, &cols));
1652: PetscCall(ISGetLocalSize(ism, &m));
1653: PetscCall(ISGetLocalSize(isn, &n));
1654: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1655: PetscCall(ISRestoreIndices(ism, &rows));
1656: PetscCall(ISRestoreIndices(isn, &cols));
1657: PetscFunctionReturn(PETSC_SUCCESS);
1658: }
1660: /*@
1661: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1662: values into a matrix
1664: Not Collective
1666: Input Parameters:
1667: + mat - the matrix
1668: . row - the (block) row to set
1669: - v - a logically two-dimensional array of values
1671: Level: intermediate
1673: Notes:
1674: The values, `v`, are column-oriented (for the block version) and sorted
1676: All the nonzero values in `row` must be provided
1678: The matrix must have previously had its column indices set, likely by having been assembled.
1680: `row` must belong to this MPI process
1682: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1683: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1684: @*/
1685: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1686: {
1687: PetscInt globalrow;
1689: PetscFunctionBegin;
1692: PetscAssertPointer(v, 3);
1693: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1694: PetscCall(MatSetValuesRow(mat, globalrow, v));
1695: PetscFunctionReturn(PETSC_SUCCESS);
1696: }
1698: /*@
1699: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1700: values into a matrix
1702: Not Collective
1704: Input Parameters:
1705: + mat - the matrix
1706: . row - the (block) row to set
1707: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1709: Level: advanced
1711: Notes:
1712: The values, `v`, are column-oriented for the block version.
1714: All the nonzeros in `row` must be provided
1716: The matrix MUST have previously had its column indices set. This routine is rarely used; `MatSetValues()` is usually preferred.
1718: `row` must belong to this process
1720: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1721: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1722: @*/
1723: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1724: {
1725: PetscFunctionBeginHot;
1728: MatCheckPreallocated(mat, 1);
1729: PetscAssertPointer(v, 3);
1730: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1731: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1732: mat->insertmode = INSERT_VALUES;
1734: if (mat->assembled) {
1735: mat->was_assembled = PETSC_TRUE;
1736: mat->assembled = PETSC_FALSE;
1737: }
1738: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1739: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1740: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1741: PetscFunctionReturn(PETSC_SUCCESS);
1742: }
1744: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1745: /*@
1746: MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1747: using structured grid indexing
1749: Not Collective
1751: Input Parameters:
1752: + mat - the matrix
1753: . m - number of rows being entered
1754: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1755: . n - number of columns being entered
1756: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1757: . v - a logically two-dimensional array of values
1758: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1760: Level: beginner
1762: Notes:
1763: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1765: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1766: options cannot be mixed without intervening calls to the assembly
1767: routines.
1769: The grid coordinates are across the entire grid, not just the local portion
1771: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1772: as well as in C.
1774: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1776: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1777: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1779: The columns and rows in the stencil passed in MUST be contained within the
1780: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1781: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1782: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5), the
1783: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1785: For periodic boundary conditions use negative indices for values to the left of index 0 (these are
1786: obtained by wrapping from the right edge), and indices past the last entry (the last index plus one, and so on)
1787: for values to the right (obtained by wrapping from the left edge). This works only with the
1788: `DM_BOUNDARY_PERIODIC` boundary type.
1790: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you
1791: have a single value per point) you can skip filling those indices.
1793: Inspired by the structured grid interface to the HYPRE package
1794: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1796: Efficiency Alert:
1797: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1798: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
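Example Usage:
A minimal sketch setting the classic 5-point stencil for one interior grid point (i,j) of a matrix obtained from a 2d `DMDA` with one degree of freedom per point; the stencil values and the loop indices i, j are illustrative
.vb
MatStencil  row, col[5];
PetscScalar v[5] = {-1.0, -1.0, 4.0, -1.0, -1.0};
row.i = i; row.j = j;
col[0].i = i;     col[0].j = j - 1;
col[1].i = i - 1; col[1].j = j;
col[2].i = i;     col[2].j = j;
col[3].i = i + 1; col[3].j = j;
col[4].i = i;     col[4].j = j + 1;
PetscCall(MatSetValuesStencil(mat, 1, &row, 5, col, v, INSERT_VALUES));
.ve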
1800: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1801: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1802: @*/
1803: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1804: {
1805: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1806: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1807: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1809: PetscFunctionBegin;
1810: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1813: PetscAssertPointer(idxm, 3);
1814: PetscAssertPointer(idxn, 5);
1816: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1817: jdxm = buf;
1818: jdxn = buf + m;
1819: } else {
1820: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1821: jdxm = bufm;
1822: jdxn = bufn;
1823: }
1824: for (i = 0; i < m; i++) {
1825: for (j = 0; j < 3 - sdim; j++) dxm++;
1826: tmp = *dxm++ - starts[0];
1827: for (j = 0; j < dim - 1; j++) {
1828: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1829: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1830: }
1831: if (mat->stencil.noc) dxm++;
1832: jdxm[i] = tmp;
1833: }
1834: for (i = 0; i < n; i++) {
1835: for (j = 0; j < 3 - sdim; j++) dxn++;
1836: tmp = *dxn++ - starts[0];
1837: for (j = 0; j < dim - 1; j++) {
1838: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1839: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1840: }
1841: if (mat->stencil.noc) dxn++;
1842: jdxn[i] = tmp;
1843: }
1844: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1845: PetscCall(PetscFree2(bufm, bufn));
1846: PetscFunctionReturn(PETSC_SUCCESS);
1847: }
1849: /*@
1850: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1851: using structured grid indexing
1853: Not Collective
1855: Input Parameters:
1856: + mat - the matrix
1857: . m - number of rows being entered
1858: . idxm - grid coordinates for matrix rows being entered
1859: . n - number of columns being entered
1860: . idxn - grid coordinates for matrix columns being entered
1861: . v - a logically two-dimensional array of values
1862: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1864: Level: beginner
1866: Notes:
1867: By default the values, `v`, are row-oriented and unsorted.
1868: See `MatSetOption()` for other options.
1870: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1871: options cannot be mixed without intervening calls to the assembly
1872: routines.
1874: The grid coordinates are across the entire grid, not just the local portion
1876: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1877: as well as in C.
1879: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1881: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1882: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1884: The columns and rows in the stencil passed in MUST be contained within the
1885: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1886: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1887: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5), the
1888: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1890: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1891: simply ignored. This allows easily inserting element stiffness matrices
1892: with homogeneous Dirichlet boundary conditions that you don't want represented
1893: in the matrix.
1895: Inspired by the structured grid interface to the HYPRE package
1896: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1898: Fortran Note:
1899: `idxm` and `idxn` should be declared as
1900: $ MatStencil idxm(4,m),idxn(4,n)
1901: and the values inserted using
1902: .vb
1903: idxm(MatStencil_i,1) = i
1904: idxm(MatStencil_j,1) = j
1905: idxm(MatStencil_k,1) = k
1906: etc
1907: .ve
1909: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1910: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1911: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1912: @*/
1913: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1914: {
1915: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1916: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1917: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1919: PetscFunctionBegin;
1920: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1923: PetscAssertPointer(idxm, 3);
1924: PetscAssertPointer(idxn, 5);
1925: PetscAssertPointer(v, 6);
1927: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1928: jdxm = buf;
1929: jdxn = buf + m;
1930: } else {
1931: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1932: jdxm = bufm;
1933: jdxn = bufn;
1934: }
1935: for (i = 0; i < m; i++) {
1936: for (j = 0; j < 3 - sdim; j++) dxm++;
1937: tmp = *dxm++ - starts[0];
1938: for (j = 0; j < sdim - 1; j++) {
1939: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1940: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1941: }
1942: dxm++;
1943: jdxm[i] = tmp;
1944: }
1945: for (i = 0; i < n; i++) {
1946: for (j = 0; j < 3 - sdim; j++) dxn++;
1947: tmp = *dxn++ - starts[0];
1948: for (j = 0; j < sdim - 1; j++) {
1949: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1950: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1951: }
1952: dxn++;
1953: jdxn[i] = tmp;
1954: }
1955: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1956: PetscCall(PetscFree2(bufm, bufn));
1957: PetscFunctionReturn(PETSC_SUCCESS);
1958: }
1960: /*@
1961: MatSetStencil - Sets the grid information for setting values into a matrix via
1962: `MatSetValuesStencil()`
1964: Not Collective
1966: Input Parameters:
1967: + mat - the matrix
1968: . dim - dimension of the grid (1, 2, or 3)
1969: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1970: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1971: - dof - number of degrees of freedom per node
1973: Level: beginner
1975: Notes:
1976: Inspired by the structured grid interface to the HYPRE package
1977: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1979: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1980: user.
1982: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1983: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1984: @*/
1985: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1986: {
1987: PetscFunctionBegin;
1989: PetscAssertPointer(dims, 3);
1990: PetscAssertPointer(starts, 4);
1992: mat->stencil.dim = dim + (dof > 1);
1993: for (PetscInt i = 0; i < dim; i++) {
1994: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1995: mat->stencil.starts[i] = starts[dim - i - 1];
1996: }
1997: mat->stencil.dims[dim] = dof;
1998: mat->stencil.starts[dim] = 0;
1999: mat->stencil.noc = (PetscBool)(dof == 1);
2000: PetscFunctionReturn(PETSC_SUCCESS);
2001: }
2003: /*@
2004: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
2006: Not Collective
2008: Input Parameters:
2009: + mat - the matrix
2010: . m - the number of block rows
2011: . idxm - the global block row indices
2012: . n - the number of block columns
2013: . idxn - the global block column indices
2014: . v - a logically two-dimensional array of values
2015: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2017: Level: intermediate
2019: Notes:
2020: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
2021: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
2023: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
2024: NOT the total number of rows/columns; for example, if the block size is 2 and
2025: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
2026: The values in `idxm` would be 1 and 2; that is, the first row index of each block divided by
2027: the block size.
2029: You must call `MatSetBlockSize()` when constructing this matrix (before
2030: preallocating it).
2032: By default the values, `v`, are row-oriented, so the layout of
2033: `v` is the same as for `MatSetValues()`. See `MatSetOption()` for other options.
2035: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
2036: options cannot be mixed without intervening calls to the assembly
2037: routines.
2039: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
2040: as well as in C.
2042: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
2043: simply ignored. This allows easily inserting element stiffness matrices
2044: with homogeneous Dirichlet boundary conditions that you don't want represented
2045: in the matrix.
2047: Each time an entry is set within a sparse matrix via `MatSetValues()`,
2048: internal searching must be done to determine where to place the
2049: data in the matrix storage space. By instead inserting blocks of
2050: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2051: reduced.
2053: Example:
2054: .vb
2055: Suppose m=n=2 and block size (bs) = 2. The array is
2057: 1 2 | 3 4
2058: 5 6 | 7 8
2059: - - - | - - -
2060: 9 10 | 11 12
2061: 13 14 | 15 16
2063: v[] should be passed in like
2064: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
2066: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2067: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2068: .ve
2070: Fortran Notes:
2071: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
2072: .vb
2073: call MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
2074: .ve
2076: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
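Example Usage:
A minimal sketch of the situation described above: block size 2, two block rows and two block columns; it assumes `mat` is a `MATBAIJ`-type matrix whose block size has already been set
.vb
PetscInt    idxm[2] = {1, 2}; // block rows 1 and 2, i.e. scalar rows 2,3 and 4,5
PetscInt    idxn[2] = {1, 2};
PetscScalar v[16]   = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
PetscCall(MatSetValuesBlocked(mat, 2, idxm, 2, idxn, v, INSERT_VALUES));
.ve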
2078: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2079: @*/
2080: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
2081: {
2082: PetscFunctionBeginHot;
2085: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2086: PetscAssertPointer(idxm, 3);
2087: PetscAssertPointer(idxn, 5);
2088: MatCheckPreallocated(mat, 1);
2089: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2090: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2091: if (PetscDefined(USE_DEBUG)) {
2092: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2093: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2094: }
2095: if (PetscDefined(USE_DEBUG)) {
2096: PetscInt rbs, cbs, M, N, i;
2097: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2098: PetscCall(MatGetSize(mat, &M, &N));
2099: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
2100: for (i = 0; i < n; i++)
2101: PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
2102: }
2103: if (mat->assembled) {
2104: mat->was_assembled = PETSC_TRUE;
2105: mat->assembled = PETSC_FALSE;
2106: }
2107: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2108: if (mat->ops->setvaluesblocked) {
2109: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2110: } else {
2111: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2112: PetscInt i, j, bs, cbs;
2114: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2115: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2116: iidxm = buf;
2117: iidxn = buf + m * bs;
2118: } else {
2119: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2120: iidxm = bufr;
2121: iidxn = bufc;
2122: }
2123: for (i = 0; i < m; i++) {
2124: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2125: }
2126: if (m != n || bs != cbs || idxm != idxn) {
2127: for (i = 0; i < n; i++) {
2128: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2129: }
2130: } else iidxn = iidxm;
2131: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2132: PetscCall(PetscFree2(bufr, bufc));
2133: }
2134: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2135: PetscFunctionReturn(PETSC_SUCCESS);
2136: }
2138: /*@
2139: MatGetValues - Gets a block of local values from a matrix.
2141: Not Collective; can only return values that are owned by the given process
2143: Input Parameters:
2144: + mat - the matrix
2145: . m - the number of rows
2146: . idxm - the global indices of the rows
2147: . n - the number of columns
2148: . idxn - the global indices of the columns
2149: - v - a logically two-dimensional array for storing the values
2151: Level: advanced
2153: Notes:
2154: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2155: The values, `v`, are then returned in a row-oriented format,
2156: analogous to that used by default in `MatSetValues()`.
2158: `MatGetValues()` uses 0-based row and column numbers in
2159: Fortran as well as in C.
2161: `MatGetValues()` requires that the matrix has been assembled
2162: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2163: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2164: without intermediate matrix assembly.
2166: Negative row or column indices will be ignored and those locations in `v` will be
2167: left unchanged.
2169: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2170: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2171: from `MatGetOwnershipRange`(mat,&rstart,&rend).
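Example Usage:
A minimal sketch retrieving a 2 by 3 block of entries after assembly; the indices are illustrative and the rows must lie in the calling process's ownership range
.vb
PetscInt    rstart, rend;
PetscScalar v[6]; // filled row by row, v[i*3 + j]
PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
PetscInt rows[2] = {rstart, rstart + 1};
PetscInt cols[3] = {0, 1, 2};
PetscCall(MatGetValues(mat, 2, rows, 3, cols, v));
.ve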
2173: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2174: @*/
2175: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2176: {
2177: PetscFunctionBegin;
2180: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2181: PetscAssertPointer(idxm, 3);
2182: PetscAssertPointer(idxn, 5);
2183: PetscAssertPointer(v, 6);
2184: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2185: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2186: MatCheckPreallocated(mat, 1);
2188: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2189: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2190: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2191: PetscFunctionReturn(PETSC_SUCCESS);
2192: }
2194: /*@
2195: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2196: defined previously by `MatSetLocalToGlobalMapping()`
2198: Not Collective
2200: Input Parameters:
2201: + mat - the matrix
2202: . nrow - number of rows
2203: . irow - the row local indices
2204: . ncol - number of columns
2205: - icol - the column local indices
2207: Output Parameter:
2208: . y - a logically two-dimensional array of values
2210: Level: advanced
2212: Notes:
2213: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2215: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2216: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2217: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2218: with `MatSetLocalToGlobalMapping()`.
2220: Developer Note:
2221: This is labelled with C so does not automatically generate Fortran stubs and interfaces
2222: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2224: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2225: `MatSetValuesLocal()`, `MatGetValues()`
2226: @*/
2227: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2228: {
2229: PetscFunctionBeginHot;
2232: MatCheckPreallocated(mat, 1);
2233: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2234: PetscAssertPointer(irow, 3);
2235: PetscAssertPointer(icol, 5);
2236: if (PetscDefined(USE_DEBUG)) {
2237: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2238: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2239: }
2240: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2241: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2242: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2243: else {
2244: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2245: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2246: irowm = buf;
2247: icolm = buf + nrow;
2248: } else {
2249: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2250: irowm = bufr;
2251: icolm = bufc;
2252: }
2253: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2254: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2255: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2256: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2257: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2258: PetscCall(PetscFree2(bufr, bufc));
2259: }
2260: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2261: PetscFunctionReturn(PETSC_SUCCESS);
2262: }
2264: /*@
2265: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2266: the same size. Currently, this can only be called once and creates the given matrix.
2268: Not Collective
2270: Input Parameters:
2271: + mat - the matrix
2272: . nb - the number of blocks
2273: . bs - the number of rows (and columns) in each block
2274: . rows - a concatenation of the rows for each block
2275: - v - a concatenation of logically two-dimensional arrays of values
2277: Level: advanced
2279: Notes:
2280: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2282: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
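Example Usage:
A minimal sketch adding two square 2 by 2 element matrices in one call; the row indices and values are illustrative
.vb
PetscInt    rows[4] = {0, 1, 2, 3};             // rows of block 0 followed by rows of block 1
PetscScalar v[8]    = {1, 2, 3, 4, 5, 6, 7, 8}; // values of block 0 followed by block 1, each row-oriented
PetscCall(MatSetValuesBatch(mat, 2, 2, rows, v));
.ve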
2284: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2285: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2286: @*/
2287: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2288: {
2289: PetscFunctionBegin;
2292: PetscAssertPointer(rows, 4);
2293: PetscAssertPointer(v, 5);
2294: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2296: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2297: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2298: else {
2299: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2300: }
2301: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2302: PetscFunctionReturn(PETSC_SUCCESS);
2303: }
2305: /*@
2306: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2307: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2308: using a local (per-processor) numbering.
2310: Not Collective
2312: Input Parameters:
2313: + x - the matrix
2314: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2315: - cmapping - column mapping
2317: Level: intermediate
2319: Note:
2320: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
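Example Usage:
A minimal sketch building a local-to-global mapping and attaching it to the matrix so that `MatSetValuesLocal()` can be used; the global index array is illustrative
.vb
ISLocalToGlobalMapping ltog;
PetscInt               globals[4] = {10, 11, 12, 13}; // global index of each local index 0..3
PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, 4, globals, PETSC_COPY_VALUES, &ltog));
PetscCall(MatSetLocalToGlobalMapping(x, ltog, ltog));
PetscCall(ISLocalToGlobalMappingDestroy(&ltog)); // the matrix keeps its own reference
.ve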
2322: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2323: @*/
2324: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2325: {
2326: PetscFunctionBegin;
2331: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2332: else {
2333: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2334: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2335: }
2336: PetscFunctionReturn(PETSC_SUCCESS);
2337: }
2339: /*@
2340: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2342: Not Collective
2344: Input Parameter:
2345: . A - the matrix
2347: Output Parameters:
2348: + rmapping - row mapping
2349: - cmapping - column mapping
2351: Level: advanced
2353: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2354: @*/
2355: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2356: {
2357: PetscFunctionBegin;
2360: if (rmapping) {
2361: PetscAssertPointer(rmapping, 2);
2362: *rmapping = A->rmap->mapping;
2363: }
2364: if (cmapping) {
2365: PetscAssertPointer(cmapping, 3);
2366: *cmapping = A->cmap->mapping;
2367: }
2368: PetscFunctionReturn(PETSC_SUCCESS);
2369: }
2371: /*@
2372: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2374: Logically Collective
2376: Input Parameters:
2377: + A - the matrix
2378: . rmap - row layout
2379: - cmap - column layout
2381: Level: advanced
2383: Note:
2384: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2386: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2387: @*/
2388: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2389: {
2390: PetscFunctionBegin;
2392: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2393: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2394: PetscFunctionReturn(PETSC_SUCCESS);
2395: }
2397: /*@
2398: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2400: Not Collective
2402: Input Parameter:
2403: . A - the matrix
2405: Output Parameters:
2406: + rmap - row layout
2407: - cmap - column layout
2409: Level: advanced
2411: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2412: @*/
2413: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2414: {
2415: PetscFunctionBegin;
2418: if (rmap) {
2419: PetscAssertPointer(rmap, 2);
2420: *rmap = A->rmap;
2421: }
2422: if (cmap) {
2423: PetscAssertPointer(cmap, 3);
2424: *cmap = A->cmap;
2425: }
2426: PetscFunctionReturn(PETSC_SUCCESS);
2427: }
2429: /*@
2430: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2431: using a local numbering of the rows and columns.
2433: Not Collective
2435: Input Parameters:
2436: + mat - the matrix
2437: . nrow - number of rows
2438: . irow - the row local indices
2439: . ncol - number of columns
2440: . icol - the column local indices
2441: . y - a logically two-dimensional array of values
2442: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2444: Level: intermediate
2446: Notes:
2447: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2449: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2450: options cannot be mixed without intervening calls to the assembly
2451: routines.
2453: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2454: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
2456: Fortran Notes:
2457: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2458: .vb
2459: call MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2460: .ve
2462: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2464: Developer Note:
2465: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2466: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
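Example Usage:
A minimal sketch adding one entry using local indices; it assumes the local-to-global mapping was provided by `DMCreateMatrix()` or `MatSetLocalToGlobalMapping()`, and the indices and value are illustrative
.vb
PetscInt    lrow = 0, lcol = 2;
PetscScalar value = 5.0;
PetscCall(MatSetValuesLocal(mat, 1, &lrow, 1, &lcol, &value, ADD_VALUES));
PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve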
2468: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2469: `MatGetValuesLocal()`
2470: @*/
2471: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2472: {
2473: PetscFunctionBeginHot;
2476: MatCheckPreallocated(mat, 1);
2477: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2478: PetscAssertPointer(irow, 3);
2479: PetscAssertPointer(icol, 5);
2480: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2481: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2482: if (PetscDefined(USE_DEBUG)) {
2483: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2484: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2485: }
2487: if (mat->assembled) {
2488: mat->was_assembled = PETSC_TRUE;
2489: mat->assembled = PETSC_FALSE;
2490: }
2491: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2492: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2493: else {
2494: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2495: const PetscInt *irowm, *icolm;
2497: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2498: bufr = buf;
2499: bufc = buf + nrow;
2500: irowm = bufr;
2501: icolm = bufc;
2502: } else {
2503: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2504: irowm = bufr;
2505: icolm = bufc;
2506: }
2507: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2508: else irowm = irow;
2509: if (mat->cmap->mapping) {
2510: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2511: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2512: } else icolm = irowm;
2513: } else icolm = icol;
2514: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2515: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2516: }
2517: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2518: PetscFunctionReturn(PETSC_SUCCESS);
2519: }
2521: /*@
2522: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2523: using a local ordering of the nodes a block at a time.
2525: Not Collective
2527: Input Parameters:
2528: + mat - the matrix
2529: . nrow - number of rows
2530: . irow - the row local indices
2531: . ncol - number of columns
2532: . icol - the column local indices
2533: . y - a logically two-dimensional array of values
2534: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2536: Level: intermediate
2538: Notes:
2539: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2540: before using this routine.
2542: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2543: options cannot be mixed without intervening calls to the assembly
2544: routines.
2546: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2547: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2549: Fortran Notes:
2550: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2551: .vb
2552: call MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2553: .ve
2555: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2557: Developer Note:
2558: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2559: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2561: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2562: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2563: @*/
2564: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2565: {
2566: PetscFunctionBeginHot;
2569: MatCheckPreallocated(mat, 1);
2570: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2571: PetscAssertPointer(irow, 3);
2572: PetscAssertPointer(icol, 5);
2573: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2574: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2575: if (PetscDefined(USE_DEBUG)) {
2576: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2577: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2578: }
2580: if (mat->assembled) {
2581: mat->was_assembled = PETSC_TRUE;
2582: mat->assembled = PETSC_FALSE;
2583: }
2584: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2585: PetscInt irbs, rbs;
2586: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2587: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2588: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2589: }
2590: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2591: PetscInt icbs, cbs;
2592: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2593: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2594: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2595: }
2596: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2597: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2598: else {
2599: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2600: const PetscInt *irowm, *icolm;
2602: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2603: bufr = buf;
2604: bufc = buf + nrow;
2605: irowm = bufr;
2606: icolm = bufc;
2607: } else {
2608: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2609: irowm = bufr;
2610: icolm = bufc;
2611: }
2612: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2613: else irowm = irow;
2614: if (mat->cmap->mapping) {
2615: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2616: PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2617: } else icolm = irowm;
2618: } else icolm = icol;
2619: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2620: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2621: }
2622: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2623: PetscFunctionReturn(PETSC_SUCCESS);
2624: }
2626: /*@
2627: MatMultDiagonalBlock - Computes the matrix-vector product $y = Dx$, where `D` is defined by the inode or block structure of the diagonal
2629: Collective
2631: Input Parameters:
2632: + mat - the matrix
2633: - x - the vector to be multiplied
2635: Output Parameter:
2636: . y - the result
2638: Level: developer
2640: Note:
2641: The vectors `x` and `y` cannot be the same. I.e., one cannot
2642: call `MatMultDiagonalBlock`(A,y,y).
2644: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2645: @*/
2646: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2647: {
2648: PetscFunctionBegin;
2654: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2655: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2656: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2657: MatCheckPreallocated(mat, 1);
2659: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2660: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2661: PetscFunctionReturn(PETSC_SUCCESS);
2662: }
2664: /*@
2665: MatMult - Computes the matrix-vector product, $y = Ax$.
2667: Neighbor-wise Collective
2669: Input Parameters:
2670: + mat - the matrix
2671: - x - the vector to be multiplied
2673: Output Parameter:
2674: . y - the result
2676: Level: beginner
2678: Note:
2679: The vectors `x` and `y` cannot be the same. I.e., one cannot
2680: call `MatMult`(A,y,y).
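Example Usage:
A minimal sketch creating compatible vectors with `MatCreateVecs()` and applying an assembled matrix
.vb
Vec x, y;
PetscCall(MatCreateVecs(mat, &x, &y)); // x matches the column layout, y the row layout
PetscCall(VecSet(x, 1.0));
PetscCall(MatMult(mat, x, y)); // y = A x
PetscCall(VecDestroy(&x));
PetscCall(VecDestroy(&y));
.ve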
2682: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2683: @*/
2684: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2685: {
2686: PetscFunctionBegin;
2690: VecCheckAssembled(x);
2692: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2693: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2694: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2695: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2696: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2697: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2698: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2699: PetscCall(VecSetErrorIfLocked(y, 3));
2700: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2701: MatCheckPreallocated(mat, 1);
2703: PetscCall(VecLockReadPush(x));
2704: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2705: PetscUseTypeMethod(mat, mult, x, y);
2706: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2707: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2708: PetscCall(VecLockReadPop(x));
2709: PetscFunctionReturn(PETSC_SUCCESS);
2710: }
2712: /*@
2713: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2715: Neighbor-wise Collective
2717: Input Parameters:
2718: + mat - the matrix
2719: - x - the vector to be multiplied
2721: Output Parameter:
2722: . y - the result
2724: Level: beginner
2726: Notes:
2727: The vectors `x` and `y` cannot be the same. I.e., one cannot
2728: call `MatMultTranspose`(A,y,y).
2730: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2731: use `MatMultHermitianTranspose()` for that.
2733: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2734: @*/
2735: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2736: {
2737: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2739: PetscFunctionBegin;
2743: VecCheckAssembled(x);
2746: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2747: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2748: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2749: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2750: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2751: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2752: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2753: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2754: MatCheckPreallocated(mat, 1);
2756: if (!mat->ops->multtranspose) {
2757: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2758: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2759: } else op = mat->ops->multtranspose;
2760: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2761: PetscCall(VecLockReadPush(x));
2762: PetscCall((*op)(mat, x, y));
2763: PetscCall(VecLockReadPop(x));
2764: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2765: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2766: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2767: PetscFunctionReturn(PETSC_SUCCESS);
2768: }
2770: /*@
2771: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2773: Neighbor-wise Collective
2775: Input Parameters:
2776: + mat - the matrix
2777: - x - the vector to be multiplied
2779: Output Parameter:
2780: . y - the result
2782: Level: beginner
2784: Notes:
2785: The vectors `x` and `y` cannot be the same. I.e., one cannot
2786: call `MatMultHermitianTranspose`(A,y,y).
2788: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2790: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2792: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2793: @*/
2794: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2795: {
2796: PetscFunctionBegin;
2802: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2803: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2804: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2805: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2806: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2807: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2808: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2809: MatCheckPreallocated(mat, 1);
2811: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2812: #if defined(PETSC_USE_COMPLEX)
2813: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2814: PetscCall(VecLockReadPush(x));
2815: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2816: else PetscUseTypeMethod(mat, mult, x, y);
2817: PetscCall(VecLockReadPop(x));
2818: } else {
2819: Vec w;
2820: PetscCall(VecDuplicate(x, &w));
2821: PetscCall(VecCopy(x, w));
2822: PetscCall(VecConjugate(w));
2823: PetscCall(MatMultTranspose(mat, w, y));
2824: PetscCall(VecDestroy(&w));
2825: PetscCall(VecConjugate(y));
2826: }
2827: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2828: #else
2829: PetscCall(MatMultTranspose(mat, x, y));
2830: #endif
2831: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2832: PetscFunctionReturn(PETSC_SUCCESS);
2833: }
2835: /*@
2836: MatMultAdd - Computes $v3 = v2 + A * v1$.
2838: Neighbor-wise Collective
2840: Input Parameters:
2841: + mat - the matrix
2842: . v1 - the vector to be multiplied by `mat`
2843: - v2 - the vector to be added to the result
2845: Output Parameter:
2846: . v3 - the result
2848: Level: beginner
2850: Note:
2851: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2852: call `MatMultAdd`(A,v1,v2,v1).
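   Example Usage:
   A minimal sketch (assumes `A` is assembled; the vectors shown are placeholders created from `A`'s layouts):
.vb
   Vec v1, v2, v3;

   MatCreateVecs(A, &v1, &v3); // v1 is column-compatible, v3 is row-compatible with A
   VecDuplicate(v3, &v2);
   VecSet(v1, 1.0);
   VecSet(v2, 2.0);
   MatMultAdd(A, v1, v2, v3);  // v3 = v2 + A*v1
.ve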
2854: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2855: @*/
2856: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2857: {
2858: PetscFunctionBegin;
2865: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2866: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2867: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2868: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2869: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2870: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2871: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2872: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2873: MatCheckPreallocated(mat, 1);
2875: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2876: PetscCall(VecLockReadPush(v1));
2877: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2878: PetscCall(VecLockReadPop(v1));
2879: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2880: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2881: PetscFunctionReturn(PETSC_SUCCESS);
2882: }
2884: /*@
2885: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2887: Neighbor-wise Collective
2889: Input Parameters:
2890: + mat - the matrix
2891: . v1 - the vector to be multiplied by the transpose of the matrix
2892: - v2 - the vector to be added to the result
2894: Output Parameter:
2895: . v3 - the result
2897: Level: beginner
2899: Note:
2900: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2901: call `MatMultTransposeAdd`(A,v1,v2,v1).
2903: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2904: @*/
2905: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2906: {
2907: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2909: PetscFunctionBegin;
2916: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2917: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2918: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2919: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2920: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2921: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2922: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2923: MatCheckPreallocated(mat, 1);
2925: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2926: PetscCall(VecLockReadPush(v1));
2927: PetscCall((*op)(mat, v1, v2, v3));
2928: PetscCall(VecLockReadPop(v1));
2929: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2930: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2931: PetscFunctionReturn(PETSC_SUCCESS);
2932: }
2934: /*@
2935: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2937: Neighbor-wise Collective
2939: Input Parameters:
2940: + mat - the matrix
2941: . v1 - the vector to be multiplied by the Hermitian transpose
2942: - v2 - the vector to be added to the result
2944: Output Parameter:
2945: . v3 - the result
2947: Level: beginner
2949: Note:
2950: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2951: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2953: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2954: @*/
2955: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2956: {
2957: PetscFunctionBegin;
2964: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2965: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2966: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2967: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2968: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2969: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2970: MatCheckPreallocated(mat, 1);
2972: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2973: PetscCall(VecLockReadPush(v1));
2974: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2975: else {
2976: Vec w, z;
2977: PetscCall(VecDuplicate(v1, &w));
2978: PetscCall(VecCopy(v1, w));
2979: PetscCall(VecConjugate(w));
2980: PetscCall(VecDuplicate(v3, &z));
2981: PetscCall(MatMultTranspose(mat, w, z));
2982: PetscCall(VecDestroy(&w));
2983: PetscCall(VecConjugate(z));
2984: if (v2 != v3) {
2985: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2986: } else {
2987: PetscCall(VecAXPY(v3, 1.0, z));
2988: }
2989: PetscCall(VecDestroy(&z));
2990: }
2991: PetscCall(VecLockReadPop(v1));
2992: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2993: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2994: PetscFunctionReturn(PETSC_SUCCESS);
2995: }
2997: /*@
2998: MatGetFactorType - gets the type of factorization that a matrix represents
3000: Not Collective
3002: Input Parameter:
3003: . mat - the matrix
3005: Output Parameter:
3006: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3008: Level: intermediate
3010: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3011: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3012: @*/
3013: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
3014: {
3015: PetscFunctionBegin;
3018: PetscAssertPointer(t, 2);
3019: *t = mat->factortype;
3020: PetscFunctionReturn(PETSC_SUCCESS);
3021: }
3023: /*@
3024: MatSetFactorType - sets the type of factorization that a matrix represents
3026: Logically Collective
3028: Input Parameters:
3029: + mat - the matrix
3030: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3032: Level: intermediate
3034: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3035: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3036: @*/
3037: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
3038: {
3039: PetscFunctionBegin;
3042: mat->factortype = t;
3043: PetscFunctionReturn(PETSC_SUCCESS);
3044: }
3046: /*@
3047: MatGetInfo - Returns information about matrix storage (number of
3048: nonzeros, memory, etc.).
3050: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3052: Input Parameters:
3053: + mat - the matrix
3054: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3056: Output Parameter:
3057: . info - matrix information context
3059: Options Database Key:
3060: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3062: Level: intermediate
3064: Notes:
3065: The `MatInfo` context contains a variety of matrix data, including
3066: number of nonzeros allocated and used, number of mallocs during
3067: matrix assembly, etc. Additional information for factored matrices
3068: is provided (such as the fill ratio, number of mallocs during
3069: factorization, etc.).
3071: Example:
3072: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3073: data within the `MatInfo` context. For example,
3074: .vb
3075: MatInfo info;
3076: Mat A;
3077: double mal, nz_a, nz_u;
3079: MatGetInfo(A, MAT_LOCAL, &info);
3080: mal = info.mallocs;
3081: nz_a = info.nz_allocated;
3082: .ve
3084: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3085: @*/
3086: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3087: {
3088: PetscFunctionBegin;
3091: PetscAssertPointer(info, 3);
3092: MatCheckPreallocated(mat, 1);
3093: PetscUseTypeMethod(mat, getinfo, flag, info);
3094: PetscFunctionReturn(PETSC_SUCCESS);
3095: }
3097: /*
3098: This is used by external packages where it is not easy to get the info from the actual
3099: matrix factorization.
3100: */
3101: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3102: {
3103: PetscFunctionBegin;
3104: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3105: PetscFunctionReturn(PETSC_SUCCESS);
3106: }
3108: /*@
3109: MatLUFactor - Performs in-place LU factorization of matrix.
3111: Collective
3113: Input Parameters:
3114: + mat - the matrix
3115: . row - row permutation
3116: . col - column permutation
3117: - info - options for factorization, includes
3118: .vb
3119: fill - expected fill as ratio of original fill.
3120: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3121: Run with the option -info to determine an optimal value to use
3122: .ve
3124: Level: developer
3126: Notes:
3127: Most users should employ the `KSP` interface for linear solvers
3128: instead of working directly with matrix algebra routines such as this.
3129: See, e.g., `KSPCreate()`.
3131: This changes the state of the matrix to a factored matrix; it cannot be used
3132: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3134: This is truly in-place only for dense matrices; when not using `KSP`, the preferred approach is to use
3135: `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`.
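   Example Usage:
   A minimal sketch of an in-place factor-and-solve (assumes `mat` is square, assembled, and of a type that supports in-place LU; `b` and `x` are compatible vectors created elsewhere; passing `NULL` uses default `MatFactorInfo` values):
.vb
   IS row, col;

   MatGetOrdering(mat, MATORDERINGND, &row, &col);
   MatLUFactor(mat, row, col, NULL);
   MatSolve(mat, b, x);
   ISDestroy(&row);
   ISDestroy(&col);
.ve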
3137: Developer Note:
3138: The Fortran interface is not autogenerated as the
3139: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3141: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3142: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3143: @*/
3144: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3145: {
3146: MatFactorInfo tinfo;
3148: PetscFunctionBegin;
3152: if (info) PetscAssertPointer(info, 4);
3154: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3155: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3156: MatCheckPreallocated(mat, 1);
3157: if (!info) {
3158: PetscCall(MatFactorInfoInitialize(&tinfo));
3159: info = &tinfo;
3160: }
3162: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3163: PetscUseTypeMethod(mat, lufactor, row, col, info);
3164: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3165: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3166: PetscFunctionReturn(PETSC_SUCCESS);
3167: }
3169: /*@
3170: MatILUFactor - Performs in-place ILU factorization of a matrix.
3172: Collective
3174: Input Parameters:
3175: + mat - the matrix
3176: . row - row permutation
3177: . col - column permutation
3178: - info - structure containing
3179: .vb
3180: levels - number of levels of fill.
3181: expected fill - as ratio of original fill.
3182: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3183: missing diagonal entries)
3184: .ve
3186: Level: developer
3188: Notes:
3189: Most users should employ the `KSP` interface for linear solvers
3190: instead of working directly with matrix algebra routines such as this.
3191: See, e.g., `KSPCreate()`.
3193: This is likely truly in-place only when the level of fill is zero; otherwise it allocates
3194: new space to store the factored matrix and frees the previous memory. When not using `KSP`,
3195: the preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`.
3197: Developer Note:
3198: The Fortran interface is not autogenerated as the
3199: interface definition cannot be generated correctly [due to MatFactorInfo]
3201: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3202: @*/
3203: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3204: {
3205: PetscFunctionBegin;
3209: PetscAssertPointer(info, 4);
3211: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3212: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3213: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3214: MatCheckPreallocated(mat, 1);
3216: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3217: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3218: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3219: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3220: PetscFunctionReturn(PETSC_SUCCESS);
3221: }
3223: /*@
3224: MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
3225: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3227: Collective
3229: Input Parameters:
3230: + fact - the factor matrix obtained with `MatGetFactor()`
3231: . mat - the matrix
3232: . row - the row permutation
3233: . col - the column permutation
3234: - info - options for factorization, includes
3235: .vb
3236: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3237: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3238: .ve
3240: Level: developer
3242: Notes:
3243: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3245: Most users should employ the simplified `KSP` interface for linear solvers
3246: instead of working directly with matrix algebra routines such as this.
3247: See, e.g., `KSPCreate()`.
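   Example Usage:
   A minimal sketch of the out-of-place LU workflow (assumes `mat` is a square, assembled `MATSEQAIJ` matrix, `b` and `x` are compatible vectors created elsewhere, and the PETSc built-in factorization is used purely for illustration):
.vb
   Mat F;
   IS  rperm, cperm;

   MatGetOrdering(mat, MATORDERINGND, &rperm, &cperm);
   MatGetFactor(mat, MATSOLVERPETSC, MAT_FACTOR_LU, &F);
   MatLUFactorSymbolic(F, mat, rperm, cperm, NULL); // NULL uses default MatFactorInfo values
   MatLUFactorNumeric(F, mat, NULL);
   MatSolve(F, b, x);
   MatDestroy(&F);
   ISDestroy(&rperm);
   ISDestroy(&cperm);
.ve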
3249: Developer Note:
3250: The Fortran interface is not autogenerated as the
3251: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3253: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3254: @*/
3255: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3256: {
3257: MatFactorInfo tinfo;
3259: PetscFunctionBegin;
3264: if (info) PetscAssertPointer(info, 5);
3267: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3268: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3269: MatCheckPreallocated(mat, 2);
3270: if (!info) {
3271: PetscCall(MatFactorInfoInitialize(&tinfo));
3272: info = &tinfo;
3273: }
3275: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3276: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3277: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3278: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3279: PetscFunctionReturn(PETSC_SUCCESS);
3280: }
3282: /*@
3283: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3284: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3286: Collective
3288: Input Parameters:
3289: + fact - the factor matrix obtained with `MatGetFactor()`
3290: . mat - the matrix
3291: - info - options for factorization
3293: Level: developer
3295: Notes:
3296: See `MatLUFactor()` for in-place factorization. See
3297: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3299: Most users should employ the `KSP` interface for linear solvers
3300: instead of working directly with matrix algebra routines such as this.
3301: See, e.g., `KSPCreate()`.
3303: Developer Note:
3304: The Fortran interface is not autogenerated as the
3305: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3307: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3308: @*/
3309: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3310: {
3311: MatFactorInfo tinfo;
3313: PetscFunctionBegin;
3318: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3319: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3320: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3322: MatCheckPreallocated(mat, 2);
3323: if (!info) {
3324: PetscCall(MatFactorInfoInitialize(&tinfo));
3325: info = &tinfo;
3326: }
3328: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3329: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3330: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3331: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3332: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3333: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3334: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3335: PetscFunctionReturn(PETSC_SUCCESS);
3336: }
3338: /*@
3339: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3340: symmetric matrix.
3342: Collective
3344: Input Parameters:
3345: + mat - the matrix
3346: . perm - row and column permutations
3347: - info - expected fill as ratio of original fill
3349: Level: developer
3351: Notes:
3352: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3353: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3355: Most users should employ the `KSP` interface for linear solvers
3356: instead of working directly with matrix algebra routines such as this.
3357: See, e.g., `KSPCreate()`.
3359: Developer Note:
3360: The Fortran interface is not autogenerated as the
3361: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3363: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`
3364: `MatGetOrdering()`
3365: @*/
3366: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3367: {
3368: MatFactorInfo tinfo;
3370: PetscFunctionBegin;
3373: if (info) PetscAssertPointer(info, 3);
3375: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3376: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3377: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3378: MatCheckPreallocated(mat, 1);
3379: if (!info) {
3380: PetscCall(MatFactorInfoInitialize(&tinfo));
3381: info = &tinfo;
3382: }
3384: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3385: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3386: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3387: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3388: PetscFunctionReturn(PETSC_SUCCESS);
3389: }
3391: /*@
3392: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3393: of a symmetric matrix.
3395: Collective
3397: Input Parameters:
3398: + fact - the factor matrix obtained with `MatGetFactor()`
3399: . mat - the matrix
3400: . perm - row and column permutations
3401: - info - options for factorization, includes
3402: .vb
3403: fill - expected fill as ratio of original fill.
3404: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3405: Run with the option -info to determine an optimal value to use
3406: .ve
3408: Level: developer
3410: Notes:
3411: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3412: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3414: Most users should employ the `KSP` interface for linear solvers
3415: instead of working directly with matrix algebra routines such as this.
3416: See, e.g., `KSPCreate()`.
3418: Developer Note:
3419: The Fortran interface is not autogenerated as the
3420: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3422: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`
3423: `MatGetOrdering()`
3424: @*/
3425: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3426: {
3427: MatFactorInfo tinfo;
3429: PetscFunctionBegin;
3433: if (info) PetscAssertPointer(info, 4);
3436: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3437: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3438: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3439: MatCheckPreallocated(mat, 2);
3440: if (!info) {
3441: PetscCall(MatFactorInfoInitialize(&tinfo));
3442: info = &tinfo;
3443: }
3445: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3446: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3447: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3448: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3449: PetscFunctionReturn(PETSC_SUCCESS);
3450: }
3452: /*@
3453: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3454: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3455: `MatCholeskyFactorSymbolic()`.
3457: Collective
3459: Input Parameters:
3460: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3461: . mat - the initial matrix that is to be factored
3462: - info - options for factorization
3464: Level: developer
3466: Note:
3467: Most users should employ the `KSP` interface for linear solvers
3468: instead of working directly with matrix algebra routines such as this.
3469: See, e.g., `KSPCreate()`.
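   Example Usage:
   A minimal sketch of the Cholesky workflow (assumes `mat` is a symmetric positive definite `MATSEQAIJ` or `MATSEQSBAIJ` matrix, `b` and `x` are compatible vectors created elsewhere, and the PETSc built-in factorization is used purely for illustration):
.vb
   Mat F;
   IS  perm, iperm;

   MatGetOrdering(mat, MATORDERINGNATURAL, &perm, &iperm);
   MatGetFactor(mat, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F);
   MatCholeskyFactorSymbolic(F, mat, perm, NULL); // NULL uses default MatFactorInfo values
   MatCholeskyFactorNumeric(F, mat, NULL);
   MatSolve(F, b, x);
   MatDestroy(&F);
   ISDestroy(&perm);
   ISDestroy(&iperm);
.ve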
3471: Developer Note:
3472: The Fortran interface is not autogenerated as the
3473: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3475: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3476: @*/
3477: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3478: {
3479: MatFactorInfo tinfo;
3481: PetscFunctionBegin;
3486: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3487: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3488: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3489: MatCheckPreallocated(mat, 2);
3490: if (!info) {
3491: PetscCall(MatFactorInfoInitialize(&tinfo));
3492: info = &tinfo;
3493: }
3495: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3496: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3497: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3498: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3499: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3500: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3501: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3502: PetscFunctionReturn(PETSC_SUCCESS);
3503: }
3505: /*@
3506: MatQRFactor - Performs in-place QR factorization of a matrix.
3508: Collective
3510: Input Parameters:
3511: + mat - the matrix
3512: . col - column permutation
3513: - info - options for factorization, includes
3514: .vb
3515: fill - expected fill as ratio of original fill.
3516: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3517: Run with the option -info to determine an optimal value to use
3518: .ve
3520: Level: developer
3522: Notes:
3523: Most users should employ the `KSP` interface for linear solvers
3524: instead of working directly with matrix algebra routines such as this.
3525: See, e.g., `KSPCreate()`.
3527: This changes the state of the matrix to a factored matrix; it cannot be used
3528: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3530: Developer Note:
3531: The Fortran interface is not autogenerated as the
3532: interface definition cannot be generated correctly [due to MatFactorInfo]
3534: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3535: `MatSetUnfactored()`
3536: @*/
3537: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3538: {
3539: PetscFunctionBegin;
3542: if (info) PetscAssertPointer(info, 3);
3544: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3545: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3546: MatCheckPreallocated(mat, 1);
3547: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3548: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3549: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3550: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3551: PetscFunctionReturn(PETSC_SUCCESS);
3552: }
3554: /*@
3555: MatQRFactorSymbolic - Performs symbolic QR factorization of a matrix.
3556: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3558: Collective
3560: Input Parameters:
3561: + fact - the factor matrix obtained with `MatGetFactor()`
3562: . mat - the matrix
3563: . col - column permutation
3564: - info - options for factorization, includes
3565: .vb
3566: fill - expected fill as ratio of original fill.
3567: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3568: Run with the option -info to determine an optimal value to use
3569: .ve
3571: Level: developer
3573: Note:
3574: Most users should employ the `KSP` interface for linear solvers
3575: instead of working directly with matrix algebra routines such as this.
3576: See, e.g., `KSPCreate()`.
3578: Developer Note:
3579: The Fortran interface is not autogenerated as the
3580: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3582: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3583: @*/
3584: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3585: {
3586: MatFactorInfo tinfo;
3588: PetscFunctionBegin;
3592: if (info) PetscAssertPointer(info, 4);
3595: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3596: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3597: MatCheckPreallocated(mat, 2);
3598: if (!info) {
3599: PetscCall(MatFactorInfoInitialize(&tinfo));
3600: info = &tinfo;
3601: }
3603: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3604: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3605: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3606: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3607: PetscFunctionReturn(PETSC_SUCCESS);
3608: }
3610: /*@
3611: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3612: Call this routine after first calling `MatGetFactor()` and `MatQRFactorSymbolic()`.
3614: Collective
3616: Input Parameters:
3617: + fact - the factor matrix obtained with `MatGetFactor()`
3618: . mat - the matrix
3619: - info - options for factorization
3621: Level: developer
3623: Notes:
3624: See `MatQRFactor()` for in-place factorization.
3626: Most users should employ the `KSP` interface for linear solvers
3627: instead of working directly with matrix algebra routines such as this.
3628: See, e.g., `KSPCreate()`.
3630: Developer Note:
3631: The Fortran interface is not autogenerated as the
3632: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3634: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3635: @*/
3636: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3637: {
3638: MatFactorInfo tinfo;
3640: PetscFunctionBegin;
3645: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3646: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3647: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3649: MatCheckPreallocated(mat, 2);
3650: if (!info) {
3651: PetscCall(MatFactorInfoInitialize(&tinfo));
3652: info = &tinfo;
3653: }
3655: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3656: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3657: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3658: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3659: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3660: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3661: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3662: PetscFunctionReturn(PETSC_SUCCESS);
3663: }
3665: /*@
3666: MatSolve - Solves $A x = b$, given a factored matrix.
3668: Neighbor-wise Collective
3670: Input Parameters:
3671: + mat - the factored matrix
3672: - b - the right-hand-side vector
3674: Output Parameter:
3675: . x - the result vector
3677: Level: developer
3679: Notes:
3680: The vectors `b` and `x` cannot be the same. I.e., one cannot
3681: call `MatSolve`(A,x,x).
3683: Most users should employ the `KSP` interface for linear solvers
3684: instead of working directly with matrix algebra routines such as this.
3685: See, e.g., `KSPCreate()`.
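   Example Usage:
   A minimal sketch (assumes `mat` is a factored matrix produced by, e.g., `MatLUFactor()` or `MatLUFactorNumeric()`, and `b` is a compatible right-hand-side vector created elsewhere):
.vb
   Vec x;

   MatCreateVecs(mat, &x, NULL); // x is column-compatible with mat
   MatSolve(mat, b, x);
.ve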
3687: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3688: @*/
3689: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3690: {
3691: PetscFunctionBegin;
3696: PetscCheckSameComm(mat, 1, b, 2);
3697: PetscCheckSameComm(mat, 1, x, 3);
3698: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3699: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3700: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3701: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3702: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3703: MatCheckPreallocated(mat, 1);
3705: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3706: PetscCall(VecFlag(x, mat->factorerrortype));
3707: if (mat->factorerrortype) {
3708: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3709: } else PetscUseTypeMethod(mat, solve, b, x);
3710: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3711: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3712: PetscFunctionReturn(PETSC_SUCCESS);
3713: }
3715: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3716: {
3717: Vec b, x;
3718: PetscInt N, i;
3719: PetscErrorCode (*f)(Mat, Vec, Vec);
3720: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3722: PetscFunctionBegin;
3723: if (A->factorerrortype) {
3724: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3725: PetscCall(MatSetInf(X));
3726: PetscFunctionReturn(PETSC_SUCCESS);
3727: }
3728: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3729: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3730: PetscCall(MatBoundToCPU(A, &Abound));
3731: if (!Abound) {
3732: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3733: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3734: }
3735: #if PetscDefined(HAVE_CUDA)
3736: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3737: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3738: #elif PetscDefined(HAVE_HIP)
3739: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3740: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3741: #endif
3742: PetscCall(MatGetSize(B, NULL, &N));
3743: for (i = 0; i < N; i++) {
3744: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3745: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3746: PetscCall((*f)(A, b, x));
3747: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3748: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3749: }
3750: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3751: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3752: PetscFunctionReturn(PETSC_SUCCESS);
3753: }
3755: /*@
3756: MatMatSolve - Solves $A X = B$, given a factored matrix.
3758: Neighbor-wise Collective
3760: Input Parameters:
3761: + A - the factored matrix
3762: - B - the right-hand-side matrix, of type `MATDENSE` (or a sparse `MATAIJ` matrix when using MUMPS)
3764: Output Parameter:
3765: . X - the result matrix (dense matrix)
3767: Level: developer
3769: Note:
3770: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3771: otherwise, `B` and `X` cannot be the same.
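   Example Usage:
   A minimal sketch solving for several right-hand sides at once (assumes `A` is a factored square matrix; `m`, `M`, and `nrhs` are placeholder values for the local row size, global row size, and number of right-hand sides):
.vb
   Mat B, X;

   MatCreateDense(PetscObjectComm((PetscObject)A), m, PETSC_DECIDE, M, nrhs, NULL, &B);
   MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X);
   // ... fill the nrhs columns of B and assemble it ...
   MatMatSolve(A, B, X);
.ve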
3773: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3774: @*/
3775: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3776: {
3777: PetscFunctionBegin;
3782: PetscCheckSameComm(A, 1, B, 2);
3783: PetscCheckSameComm(A, 1, X, 3);
3784: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3785: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3786: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3787: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3788: MatCheckPreallocated(A, 1);
3790: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3791: if (!A->ops->matsolve) {
3792: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3793: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3794: } else PetscUseTypeMethod(A, matsolve, B, X);
3795: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3796: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3797: PetscFunctionReturn(PETSC_SUCCESS);
3798: }
3800: /*@
3801: MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.
3803: Neighbor-wise Collective
3805: Input Parameters:
3806: + A - the factored matrix
3807: - B - the right-hand-side matrix (`MATDENSE` matrix)
3809: Output Parameter:
3810: . X - the result matrix (dense matrix)
3812: Level: developer
3814: Note:
3815: The matrices `B` and `X` cannot be the same. I.e., one cannot
3816: call `MatMatSolveTranspose`(A,X,X).
3818: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3819: @*/
3820: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3821: {
3822: PetscFunctionBegin;
3827: PetscCheckSameComm(A, 1, B, 2);
3828: PetscCheckSameComm(A, 1, X, 3);
3829: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3830: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3831: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3832: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3833: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3834: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3835: MatCheckPreallocated(A, 1);
3837: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3838: if (!A->ops->matsolvetranspose) {
3839: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3840: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3841: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3842: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3843: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3844: PetscFunctionReturn(PETSC_SUCCESS);
3845: }
3847: /*@
3848: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3850: Neighbor-wise Collective
3852: Input Parameters:
3853: + A - the factored matrix
3854: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3856: Output Parameter:
3857: . X - the result matrix (dense matrix)
3859: Level: developer
3861: Note:
3862: For MUMPS, the right-hand side is supported only in centralized sparse compressed column format on the host process. The user must therefore create `Bt` (the transpose of the right-hand side) in sparse compressed row
3863: format on the host process and call `MatMatTransposeSolve()` to obtain the effect of MUMPS' `MatMatSolve()`.
3865: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3866: @*/
3867: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3868: {
3869: PetscFunctionBegin;
3874: PetscCheckSameComm(A, 1, Bt, 2);
3875: PetscCheckSameComm(A, 1, X, 3);
3877: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3878: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3879: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3880: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as row number of the rhs matrix");
3881: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3882: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3883: MatCheckPreallocated(A, 1);
3885: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3886: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3887: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3888: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3889: PetscFunctionReturn(PETSC_SUCCESS);
3890: }
3892: /*@
3893: MatForwardSolve - Solves $L x = b$, given a factored matrix $A = LU$, or
3894: $U^T D^{1/2} x = b$, given a factored symmetric matrix $A = U^T D U$.
3896: Neighbor-wise Collective
3898: Input Parameters:
3899: + mat - the factored matrix
3900: - b - the right-hand-side vector
3902: Output Parameter:
3903: . x - the result vector
3905: Level: developer
3907: Notes:
3908: `MatSolve()` should be used for most applications, as it performs
3909: a forward solve followed by a backward solve.
3911: The vectors `b` and `x` cannot be the same, i.e., one cannot
3912: call `MatForwardSolve`(A,x,x).
3914: For matrices in `MATSEQBAIJ` format with block size larger than 1,
3915: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3916: `MatForwardSolve()` solves $U^T D y = b$, and
3917: `MatBackwardSolve()` solves $U x = y$.
3918: Thus they do not provide a symmetric preconditioner.
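   Example Usage:
   A minimal sketch applying the two triangular solves explicitly (assumes `mat` is a factored matrix and `b`, `y`, `x` are compatible vectors created elsewhere; the combined effect matches a single `MatSolve()`):
.vb
   MatForwardSolve(mat, b, y);  // y = L^{-1} b
   MatBackwardSolve(mat, y, x); // x = U^{-1} y = A^{-1} b
.ve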
3920: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3921: @*/
3922: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3923: {
3924: PetscFunctionBegin;
3929: PetscCheckSameComm(mat, 1, b, 2);
3930: PetscCheckSameComm(mat, 1, x, 3);
3931: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3932: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3933: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3934: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3935: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3936: MatCheckPreallocated(mat, 1);
3938: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3939: PetscUseTypeMethod(mat, forwardsolve, b, x);
3940: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3941: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3942: PetscFunctionReturn(PETSC_SUCCESS);
3943: }
3945: /*@
3946: MatBackwardSolve - Solves $U x = b$, given a factored matrix $A = LU$, or
3947: $D^{1/2} U x = b$, given a factored symmetric matrix $A = U^T D U$.
3949: Neighbor-wise Collective
3951: Input Parameters:
3952: + mat - the factored matrix
3953: - b - the right-hand-side vector
3955: Output Parameter:
3956: . x - the result vector
3958: Level: developer
3960: Notes:
3961: `MatSolve()` should be used for most applications, as it performs
3962: a forward solve followed by a backward solve.
3964: The vectors `b` and `x` cannot be the same. I.e., one cannot
3965: call `MatBackwardSolve`(A,x,x).
3967: For matrices in `MATSEQBAIJ` format with block size larger than 1,
3968: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3969: `MatForwardSolve()` solves $U^T D y = b$, and
3970: `MatBackwardSolve()` solves $U x = y$.
3971: Thus they do not provide a symmetric preconditioner.
3973: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3974: @*/
3975: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3976: {
3977: PetscFunctionBegin;
3982: PetscCheckSameComm(mat, 1, b, 2);
3983: PetscCheckSameComm(mat, 1, x, 3);
3984: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3985: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3986: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3987: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3988: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3989: MatCheckPreallocated(mat, 1);
3991: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3992: PetscUseTypeMethod(mat, backwardsolve, b, x);
3993: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3994: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3995: PetscFunctionReturn(PETSC_SUCCESS);
3996: }
3998: /*@
3999: MatSolveAdd - Computes $x = y + A^{-1} b$, given a factored matrix.
4001: Neighbor-wise Collective
4003: Input Parameters:
4004: + mat - the factored matrix
4005: . b - the right-hand-side vector
4006: - y - the vector to be added to
4008: Output Parameter:
4009: . x - the result vector
4011: Level: developer
4013: Note:
4014: The vectors `b` and `x` cannot be the same. I.e., one cannot
4015: call `MatSolveAdd`(A,x,y,x).
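   Example Usage:
   A minimal sketch of a correction-style update (assumes `mat` is a factored square matrix and `b`, `y` are compatible vectors created elsewhere):
.vb
   Vec x;

   VecDuplicate(y, &x);       // for a square matrix, x and y share the same layout
   MatSolveAdd(mat, b, y, x); // x = y + A^{-1} b
.ve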
4017: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
4018: @*/
4019: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
4020: {
4021: PetscScalar one = 1.0;
4022: Vec tmp;
4024: PetscFunctionBegin;
4030: PetscCheckSameComm(mat, 1, b, 2);
4031: PetscCheckSameComm(mat, 1, y, 3);
4032: PetscCheckSameComm(mat, 1, x, 4);
4033: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4034: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4035: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4036: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4037: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4038: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4039: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4040: MatCheckPreallocated(mat, 1);
4042: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
4043: PetscCall(VecFlag(x, mat->factorerrortype));
4044: if (mat->factorerrortype) {
4045: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4046: } else if (mat->ops->solveadd) {
4047: PetscUseTypeMethod(mat, solveadd, b, y, x);
4048: } else {
4049: /* do the solve then the add manually */
4050: if (x != y) {
4051: PetscCall(MatSolve(mat, b, x));
4052: PetscCall(VecAXPY(x, one, y));
4053: } else {
4054: PetscCall(VecDuplicate(x, &tmp));
4055: PetscCall(VecCopy(x, tmp));
4056: PetscCall(MatSolve(mat, b, x));
4057: PetscCall(VecAXPY(x, one, tmp));
4058: PetscCall(VecDestroy(&tmp));
4059: }
4060: }
4061: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4062: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4063: PetscFunctionReturn(PETSC_SUCCESS);
4064: }
4066: /*@
4067: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4069: Neighbor-wise Collective
4071: Input Parameters:
4072: + mat - the factored matrix
4073: - b - the right-hand-side vector
4075: Output Parameter:
4076: . x - the result vector
4078: Level: developer
4080: Notes:
4081: The vectors `b` and `x` cannot be the same. I.e., one cannot
4082: call `MatSolveTranspose`(A,x,x).
4084: Most users should employ the `KSP` interface for linear solvers
4085: instead of working directly with matrix algebra routines such as this.
4086: See, e.g., `KSPCreate()`.
4088: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4089: @*/
4090: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4091: {
4092: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4094: PetscFunctionBegin;
4099: PetscCheckSameComm(mat, 1, b, 2);
4100: PetscCheckSameComm(mat, 1, x, 3);
4101: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4102: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4103: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4104: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4105: MatCheckPreallocated(mat, 1);
4106: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4107: PetscCall(VecFlag(x, mat->factorerrortype));
4108: if (mat->factorerrortype) {
4109: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4110: } else {
4111: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4112: PetscCall((*f)(mat, b, x));
4113: }
4114: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4115: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4116: PetscFunctionReturn(PETSC_SUCCESS);
4117: }
4119: /*@
4120: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4121: factored matrix.
4123: Neighbor-wise Collective
4125: Input Parameters:
4126: + mat - the factored matrix
4127: . b - the right-hand-side vector
4128: - y - the vector to be added to
4130: Output Parameter:
4131: . x - the result vector
4133: Level: developer
4135: Note:
4136: The vectors `b` and `x` cannot be the same. I.e., one cannot
4137: call `MatSolveTransposeAdd`(A,x,y,x).
4139: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4140: @*/
4141: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4142: {
4143: PetscScalar one = 1.0;
4144: Vec tmp;
4145: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4147: PetscFunctionBegin;
4153: PetscCheckSameComm(mat, 1, b, 2);
4154: PetscCheckSameComm(mat, 1, y, 3);
4155: PetscCheckSameComm(mat, 1, x, 4);
4156: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4157: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4158: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4159: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4160: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4161: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4162: MatCheckPreallocated(mat, 1);
4164: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4165: PetscCall(VecFlag(x, mat->factorerrortype));
4166: if (mat->factorerrortype) {
4167: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4168: } else if (f) {
4169: PetscCall((*f)(mat, b, y, x));
4170: } else {
4171: /* do the solve then the add manually */
4172: if (x != y) {
4173: PetscCall(MatSolveTranspose(mat, b, x));
4174: PetscCall(VecAXPY(x, one, y));
4175: } else {
4176: PetscCall(VecDuplicate(x, &tmp));
4177: PetscCall(VecCopy(x, tmp));
4178: PetscCall(MatSolveTranspose(mat, b, x));
4179: PetscCall(VecAXPY(x, one, tmp));
4180: PetscCall(VecDestroy(&tmp));
4181: }
4182: }
4183: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4184: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4185: PetscFunctionReturn(PETSC_SUCCESS);
4186: }
4188: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4189: /*@
4190: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4192: Neighbor-wise Collective
4194: Input Parameters:
4195: + mat - the matrix
4196: . b - the right-hand side
4197: . omega - the relaxation factor
4198: . flag - flag indicating the type of SOR (see below)
4199: . shift - diagonal shift
4200: . its - the number of iterations
4201: - lits - the number of local iterations
4203: Output Parameter:
4204: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4206: SOR Flags:
4207: + `SOR_FORWARD_SWEEP` - forward SOR
4208: . `SOR_BACKWARD_SWEEP` - backward SOR
4209: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4210: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4211: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4212: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4213: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4214: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4215: upper/lower triangular part of matrix to
4216: vector (with omega)
4217: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4219: Level: developer
4221: Notes:
4222: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4223: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4224: on each processor.
4226: Application programmers will not generally use `MatSOR()` directly,
4227: but instead will employ the `KSP`/`PC` interface.
4229: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing, otherwise it does a pointwise smoothing
4231: Most users should employ the `KSP` interface for linear solvers
4232: instead of working directly with matrix algebra routines such as this.
4233: See, e.g., `KSPCreate()`.
4235: Vectors `x` and `b` CANNOT be the same
4237: The flags are implemented as bitwise inclusive or operations.
4238: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4239: to specify a zero initial guess for SSOR.
4241: Developer Note:
4242: We should add block SOR support for `MATAIJ` matrices with block size greater than one and no inodes
4244: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4245: @*/
4246: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4247: {
4248: PetscFunctionBegin;
4253: PetscCheckSameComm(mat, 1, b, 2);
4254: PetscCheckSameComm(mat, 1, x, 8);
4255: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4256: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4257: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4258: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4259: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4260: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4261: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4262: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4264: MatCheckPreallocated(mat, 1);
4265: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4266: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4267: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4268: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4269: PetscFunctionReturn(PETSC_SUCCESS);
4270: }
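/*
   A minimal usage sketch for MatSOR() (assumes mat is assembled and b, x are compatible vectors
   created by the caller): one symmetric sweep with relaxation factor 1.0, no diagonal shift, and
   a zero initial guess, combining flags with a bitwise or as described above.

     PetscCall(MatSOR(mat, b, 1.0, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), 0.0, 1, 1, x));

   As the manual page notes, most users obtain this behavior through KSP/PC (for example PCSOR)
   instead of calling MatSOR() directly.
*/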
4272: /*
4273: Default matrix copy routine.
4274: */
4275: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4276: {
4277: PetscInt i, rstart = 0, rend = 0, nz;
4278: const PetscInt *cwork;
4279: const PetscScalar *vwork;
4281: PetscFunctionBegin;
4282: if (B->assembled) PetscCall(MatZeroEntries(B));
4283: if (str == SAME_NONZERO_PATTERN) {
4284: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4285: for (i = rstart; i < rend; i++) {
4286: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4287: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4288: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4289: }
4290: } else {
4291: PetscCall(MatAYPX(B, 0.0, A, str));
4292: }
4293: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4294: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4295: PetscFunctionReturn(PETSC_SUCCESS);
4296: }
4298: /*@
4299: MatCopy - Copies a matrix to another matrix.
4301: Collective
4303: Input Parameters:
4304: + A - the matrix
4305: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4307: Output Parameter:
4308: . B - where the copy is put
4310: Level: intermediate
4312: Notes:
4313: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4315: `MatCopy()` copies the matrix entries of a matrix to another existing
4316: matrix (after first zeroing the second matrix). A related routine is
4317: `MatConvert()`, which first creates a new matrix and then copies the data.
4319: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4320: @*/
4321: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4322: {
4323: PetscInt i;
4325: PetscFunctionBegin;
4330: PetscCheckSameComm(A, 1, B, 2);
4331: MatCheckPreallocated(B, 2);
4332: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4333: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4334: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4335: A->cmap->N, B->cmap->N);
4336: MatCheckPreallocated(A, 1);
4337: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4339: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4340: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4341: else PetscCall(MatCopy_Basic(A, B, str));
4343: B->stencil.dim = A->stencil.dim;
4344: B->stencil.noc = A->stencil.noc;
4345: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4346: B->stencil.dims[i] = A->stencil.dims[i];
4347: B->stencil.starts[i] = A->stencil.starts[i];
4348: }
4350: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4351: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4352: PetscFunctionReturn(PETSC_SUCCESS);
4353: }
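/*
   A minimal usage sketch for MatCopy() (assumes A is assembled and B already exists, is
   preallocated, and has the same global dimensions as A):

     PetscCall(MatCopy(A, B, DIFFERENT_NONZERO_PATTERN));  // safe default
     PetscCall(MatCopy(A, B, SAME_NONZERO_PATTERN));       // faster, only if the patterns truly match

   Use MatDuplicate() when B does not exist yet, or MatConvert() when a different matrix type is
   wanted.
*/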
4355: /*@
4356: MatConvert - Converts a matrix to another matrix, either of the same
4357: or different type.
4359: Collective
4361: Input Parameters:
4362: + mat - the matrix
4363: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4364: same type as the original matrix.
4365: - reuse - denotes if the destination matrix is to be created or reused.
4366: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input `Mat` to be changed to contain the matrix in the new format), otherwise use
4367: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4369: Output Parameter:
4370: . M - pointer to place new matrix
4372: Level: intermediate
4374: Notes:
4375: `MatConvert()` first creates a new matrix and then copies the data from
4376: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4377: entries of one matrix to another already existing matrix context.
4379: Cannot be used to convert a sequential matrix to parallel or parallel to sequential;
4380: the MPI communicator of the generated matrix is always the same as the communicator
4381: of the input matrix.
4383: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4384: @*/
4385: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4386: {
4387: PetscBool sametype, issame, flg;
4388: PetscBool3 issymmetric, ishermitian;
4389: char convname[256], mtype[256];
4390: Mat B;
4392: PetscFunctionBegin;
4395: PetscAssertPointer(M, 4);
4396: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4397: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4398: MatCheckPreallocated(mat, 1);
4400: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4401: if (flg) newtype = mtype;
4403: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4404: PetscCall(PetscStrcmp(newtype, "same", &issame));
4405: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4406: if (reuse == MAT_REUSE_MATRIX) {
4408: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4409: }
4411: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4412: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4413: PetscFunctionReturn(PETSC_SUCCESS);
4414: }
4416: /* Cache Mat options because some converters use MatHeaderReplace */
4417: issymmetric = mat->symmetric;
4418: ishermitian = mat->hermitian;
4420: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4421: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4422: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4423: } else {
4424: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4425: const char *prefix[3] = {"seq", "mpi", ""};
4426: PetscInt i;
4427: /*
4428: Order of precedence:
4429: 0) See if newtype is a superclass of the current matrix.
4430: 1) See if a specialized converter is known to the current matrix.
4431: 2) See if a specialized converter is known to the desired matrix class.
4432: 3) See if a good general converter is registered for the desired class
4433: (as of 6/27/03 only MATMPIADJ falls into this category).
4434: 4) See if a good general converter is known for the current matrix.
4435: 5) Use a really basic converter.
4436: */
4438: /* 0) See if newtype is a superclass of the current matrix.
4439: i.e mat is mpiaij and newtype is aij */
4440: for (i = 0; i < 2; i++) {
4441: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4442: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4443: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4444: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4445: if (flg) {
4446: if (reuse == MAT_INPLACE_MATRIX) {
4447: PetscCall(PetscInfo(mat, "Early return\n"));
4448: PetscFunctionReturn(PETSC_SUCCESS);
4449: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4450: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4451: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4452: PetscFunctionReturn(PETSC_SUCCESS);
4453: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4454: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4455: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4456: PetscFunctionReturn(PETSC_SUCCESS);
4457: }
4458: }
4459: }
4460: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4461: for (i = 0; i < 3; i++) {
4462: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4463: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4464: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4465: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4466: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4467: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4468: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4469: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4470: if (conv) goto foundconv;
4471: }
4473: /* 2) See if a specialized converter is known to the desired matrix class. */
4474: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4475: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4476: PetscCall(MatSetType(B, newtype));
4477: for (i = 0; i < 3; i++) {
4478: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4479: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4480: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4481: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4482: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4483: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4484: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4485: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4486: if (conv) {
4487: PetscCall(MatDestroy(&B));
4488: goto foundconv;
4489: }
4490: }
4492: /* 3) See if a good general converter is registered for the desired class */
4493: conv = B->ops->convertfrom;
4494: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4495: PetscCall(MatDestroy(&B));
4496: if (conv) goto foundconv;
4498: /* 4) See if a good general converter is known for the current matrix */
4499: if (mat->ops->convert) conv = mat->ops->convert;
4500: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4501: if (conv) goto foundconv;
4503: /* 5) Use a really basic converter. */
4504: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4505: conv = MatConvert_Basic;
4507: foundconv:
4508: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4509: PetscCall((*conv)(mat, newtype, reuse, M));
4510: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4511: /* the block sizes must be same if the mappings are copied over */
4512: (*M)->rmap->bs = mat->rmap->bs;
4513: (*M)->cmap->bs = mat->cmap->bs;
4514: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4515: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4516: (*M)->rmap->mapping = mat->rmap->mapping;
4517: (*M)->cmap->mapping = mat->cmap->mapping;
4518: }
4519: (*M)->stencil.dim = mat->stencil.dim;
4520: (*M)->stencil.noc = mat->stencil.noc;
4521: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4522: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4523: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4524: }
4525: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4526: }
4527: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4529: /* Copy Mat options */
4530: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4531: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4532: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4533: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4534: PetscFunctionReturn(PETSC_SUCCESS);
4535: }
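/*
   A minimal usage sketch for MatConvert() (assumes A is an assembled MATAIJ matrix):

     Mat Adense;
     PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense));  // create a dense copy
     // ... A's values change, nonzero structure stays the same ...
     PetscCall(MatConvert(A, MATDENSE, MAT_REUSE_MATRIX, &Adense));    // refill the existing copy
     PetscCall(MatDestroy(&Adense));

   For an in-place change of type, pass the same matrix as input and output:

     PetscCall(MatConvert(A, MATDENSE, MAT_INPLACE_MATRIX, &A));
*/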
4537: /*@
4538: MatFactorGetSolverType - Returns the name of the package providing the factorization routines
4540: Not Collective
4542: Input Parameter:
4543: . mat - the matrix, must be a factored matrix
4545: Output Parameter:
4546: . type - the string name of the package (do not free this string)
4548: Level: intermediate
4550: Fortran Note:
4551: Pass in an empty string that is long enough and the package name will be copied into it.
4553: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4554: @*/
4555: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4556: {
4557: PetscErrorCode (*conv)(Mat, MatSolverType *);
4559: PetscFunctionBegin;
4562: PetscAssertPointer(type, 2);
4563: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4564: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4565: if (conv) PetscCall((*conv)(mat, type));
4566: else *type = MATSOLVERPETSC;
4567: PetscFunctionReturn(PETSC_SUCCESS);
4568: }
4570: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4571: struct _MatSolverTypeForSpecifcType {
4572: MatType mtype;
4573: /* no entry for MAT_FACTOR_NONE */
4574: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4575: MatSolverTypeForSpecifcType next;
4576: };
4578: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4579: struct _MatSolverTypeHolder {
4580: char *name;
4581: MatSolverTypeForSpecifcType handlers;
4582: MatSolverTypeHolder next;
4583: };
4585: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4587: /*@C
4588: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4590: Logically Collective, No Fortran Support
4592: Input Parameters:
4593: + package - name of the package, for example petsc or superlu
4594: . mtype - the matrix type that works with this package
4595: . ftype - the type of factorization supported by the package
4596: - createfactor - routine that will create the factored matrix ready to be used
4598: Level: developer
4600: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4601: `MatGetFactor()`
4602: @*/
4603: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4604: {
4605: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4606: PetscBool flg;
4607: MatSolverTypeForSpecifcType inext, iprev = NULL;
4609: PetscFunctionBegin;
4610: PetscCall(MatInitializePackage());
4611: if (!next) {
4612: PetscCall(PetscNew(&MatSolverTypeHolders));
4613: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4614: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4615: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4616: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4617: PetscFunctionReturn(PETSC_SUCCESS);
4618: }
4619: while (next) {
4620: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4621: if (flg) {
4622: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4623: inext = next->handlers;
4624: while (inext) {
4625: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4626: if (flg) {
4627: inext->createfactor[(int)ftype - 1] = createfactor;
4628: PetscFunctionReturn(PETSC_SUCCESS);
4629: }
4630: iprev = inext;
4631: inext = inext->next;
4632: }
4633: PetscCall(PetscNew(&iprev->next));
4634: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4635: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4636: PetscFunctionReturn(PETSC_SUCCESS);
4637: }
4638: prev = next;
4639: next = next->next;
4640: }
4641: PetscCall(PetscNew(&prev->next));
4642: PetscCall(PetscStrallocpy(package, &prev->next->name));
4643: PetscCall(PetscNew(&prev->next->handlers));
4644: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4645: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4646: PetscFunctionReturn(PETSC_SUCCESS);
4647: }
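/*
   A minimal registration sketch for MatSolverTypeRegister(); the solver name "mysolver" and the
   creation routine MatGetFactor_SeqAIJ_MySolver() are hypothetical placeholders for what a
   plugin would provide.

     extern PetscErrorCode MatGetFactor_SeqAIJ_MySolver(Mat, MatFactorType, Mat *);  // hypothetical
     PetscCall(MatSolverTypeRegister("mysolver", MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_SeqAIJ_MySolver));

   After registration, MatGetFactor(A, "mysolver", MAT_FACTOR_LU, &F) can locate the routine
   through MatSolverTypeGet().
*/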
4649: /*@C
4650: MatSolverTypeGet - Gets the function that creates the factor matrix if it exists
4652: Input Parameters:
4653: + type - name of the package, for example petsc or superlu; if this is `NULL`, then the first result that satisfies the other criteria is returned
4654: . mtype - the matrix type that works with this type
4655: - ftype - the type of factorization supported by the type
4657: Output Parameters:
4658: + foundtype - `PETSC_TRUE` if the type was registered
4659: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4660: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4662: Calling sequence of `createfactor`:
4663: + A - the matrix providing the factor matrix
4664: . ftype - the `MatFactorType` of the factor requested
4665: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4667: Level: developer
4669: Note:
4670: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4671: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4672: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4674: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4675: `MatInitializePackage()`
4676: @*/
4677: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
4678: {
4679: MatSolverTypeHolder next = MatSolverTypeHolders;
4680: PetscBool flg;
4681: MatSolverTypeForSpecifcType inext;
4683: PetscFunctionBegin;
4684: if (foundtype) *foundtype = PETSC_FALSE;
4685: if (foundmtype) *foundmtype = PETSC_FALSE;
4686: if (createfactor) *createfactor = NULL;
4688: if (type) {
4689: while (next) {
4690: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4691: if (flg) {
4692: if (foundtype) *foundtype = PETSC_TRUE;
4693: inext = next->handlers;
4694: while (inext) {
4695: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4696: if (flg) {
4697: if (foundmtype) *foundmtype = PETSC_TRUE;
4698: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4699: PetscFunctionReturn(PETSC_SUCCESS);
4700: }
4701: inext = inext->next;
4702: }
4703: }
4704: next = next->next;
4705: }
4706: } else {
4707: while (next) {
4708: inext = next->handlers;
4709: while (inext) {
4710: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4711: if (flg && inext->createfactor[(int)ftype - 1]) {
4712: if (foundtype) *foundtype = PETSC_TRUE;
4713: if (foundmtype) *foundmtype = PETSC_TRUE;
4714: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4715: PetscFunctionReturn(PETSC_SUCCESS);
4716: }
4717: inext = inext->next;
4718: }
4719: next = next->next;
4720: }
4721: /* try with base classes inext->mtype */
4722: next = MatSolverTypeHolders;
4723: while (next) {
4724: inext = next->handlers;
4725: while (inext) {
4726: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4727: if (flg && inext->createfactor[(int)ftype - 1]) {
4728: if (foundtype) *foundtype = PETSC_TRUE;
4729: if (foundmtype) *foundmtype = PETSC_TRUE;
4730: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4731: PetscFunctionReturn(PETSC_SUCCESS);
4732: }
4733: inext = inext->next;
4734: }
4735: next = next->next;
4736: }
4737: }
4738: PetscFunctionReturn(PETSC_SUCCESS);
4739: }
4741: PetscErrorCode MatSolverTypeDestroy(void)
4742: {
4743: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4744: MatSolverTypeForSpecifcType inext, iprev;
4746: PetscFunctionBegin;
4747: while (next) {
4748: PetscCall(PetscFree(next->name));
4749: inext = next->handlers;
4750: while (inext) {
4751: PetscCall(PetscFree(inext->mtype));
4752: iprev = inext;
4753: inext = inext->next;
4754: PetscCall(PetscFree(iprev));
4755: }
4756: prev = next;
4757: next = next->next;
4758: PetscCall(PetscFree(prev));
4759: }
4760: MatSolverTypeHolders = NULL;
4761: PetscFunctionReturn(PETSC_SUCCESS);
4762: }
4764: /*@
4765: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4767: Logically Collective
4769: Input Parameter:
4770: . mat - the matrix
4772: Output Parameter:
4773: . flg - `PETSC_TRUE` if uses the ordering
4775: Level: developer
4777: Note:
4778: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4779: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4781: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4782: @*/
4783: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4784: {
4785: PetscFunctionBegin;
4786: *flg = mat->canuseordering;
4787: PetscFunctionReturn(PETSC_SUCCESS);
4788: }
4790: /*@
4791: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4793: Logically Collective
4795: Input Parameters:
4796: + mat - the matrix obtained with `MatGetFactor()`
4797: - ftype - the factorization type to be used
4799: Output Parameter:
4800: . otype - the preferred ordering type
4802: Level: developer
4804: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4805: @*/
4806: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4807: {
4808: PetscFunctionBegin;
4809: *otype = mat->preferredordering[ftype];
4810: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4811: PetscFunctionReturn(PETSC_SUCCESS);
4812: }
4814: /*@
4815: MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic,Numeric()
4817: Collective
4819: Input Parameters:
4820: + mat - the matrix
4821: . type - name of solver type, for example, superlu, petsc (to use PETSc's solver if it is available); if this is `NULL`, then the first result that satisfies
4822: the other criteria is returned
4823: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4825: Output Parameter:
4826: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4828: Options Database Keys:
4829: + -pc_factor_mat_solver_type <type> - choose the type at run time. When using `KSP` solvers
4830: . -pc_factor_mat_factor_on_host <bool> - do mat factorization on host (with device matrices). Default is doing it on device
4831: - -pc_factor_mat_solve_on_host <bool> - do mat solve on host (with device matrices). Default is doing it on device
4833: Level: intermediate
4835: Notes:
4836: The returned matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4837: types registered with `MatSolverTypeRegister()` cannot be fully verified until runtime.
4839: Users usually access the factorization solvers via `KSP`
4841: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4842: such as pastix, superlu, mumps etc. PETSc must have been configured (./configure) to use the external solver, using the option --download-package or --with-package-dir
4844: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4845: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4846: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4848: Some of the packages have options for controlling the factorization; these are in the form -prefix_mat_packagename_packageoption
4849: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly one can set the prefix by
4850: calling `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4852: Developer Note:
4853: This should actually be called `MatCreateFactor()` since it creates a new factor object
4855: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4856: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`
4857: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4858: @*/
4859: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4860: {
4861: PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
4862: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4864: PetscFunctionBegin;
4868: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4869: MatCheckPreallocated(mat, 1);
4871: PetscCall(MatIsShell(mat, &shell));
4872: if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
4873: if (hasop) {
4874: PetscUseTypeMethod(mat, getfactor, type, ftype, f);
4875: PetscFunctionReturn(PETSC_SUCCESS);
4876: }
4878: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4879: if (!foundtype) {
4880: if (type) {
4881: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4882: ((PetscObject)mat)->type_name, type);
4883: } else {
4884: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4885: }
4886: }
4887: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4888: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4890: PetscCall((*conv)(mat, ftype, f));
4891: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4892: PetscFunctionReturn(PETSC_SUCCESS);
4893: }
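/*
   A minimal LU workflow sketch built on MatGetFactor() (assumes A is an assembled square MATAIJ
   matrix and b, x are compatible vectors; customization of MatFactorInfo is omitted):

     Mat           F;
     IS            rowperm, colperm;
     MatFactorInfo info;

     PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
     PetscCall(MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm));
     PetscCall(MatFactorInfoInitialize(&info));
     PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
     PetscCall(MatLUFactorNumeric(F, A, &info));
     PetscCall(MatSolve(F, b, x));
     PetscCall(ISDestroy(&rowperm));
     PetscCall(ISDestroy(&colperm));
     PetscCall(MatDestroy(&F));

   As noted above, most users access this machinery through KSP/PC instead of calling it directly.
*/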
4895: /*@
4896: MatGetFactorAvailable - Returns a flag indicating whether the matrix supports the given solver type and factor type
4898: Not Collective
4900: Input Parameters:
4901: + mat - the matrix
4902: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4903: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4905: Output Parameter:
4906: . flg - `PETSC_TRUE` if the factorization is available
4908: Level: intermediate
4910: Notes:
4911: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4912: such as pastix, superlu, mumps etc.
4914: PETSc must have been configured (./configure) to use the external solver, using the option --download-package
4916: Developer Note:
4917: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4919: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4920: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4921: @*/
4922: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4923: {
4924: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4926: PetscFunctionBegin;
4928: PetscAssertPointer(flg, 4);
4930: *flg = PETSC_FALSE;
4931: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
4933: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4934: MatCheckPreallocated(mat, 1);
4936: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4937: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4938: PetscFunctionReturn(PETSC_SUCCESS);
4939: }
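/*
   A minimal sketch combining MatGetFactorAvailable() with MatGetFactor() (assumes A is an
   assembled matrix; the superlu solver is only present if PETSc was configured with
   --download-superlu):

     PetscBool avail;
     Mat       F;

     PetscCall(MatGetFactorAvailable(A, MATSOLVERSUPERLU, MAT_FACTOR_LU, &avail));
     if (avail) PetscCall(MatGetFactor(A, MATSOLVERSUPERLU, MAT_FACTOR_LU, &F));
     else PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
*/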
4941: /*@
4942: MatDuplicate - Duplicates a matrix including the non-zero structure.
4944: Collective
4946: Input Parameters:
4947: + mat - the matrix
4948: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4949: See the manual page for `MatDuplicateOption` for an explanation of these options.
4951: Output Parameter:
4952: . M - pointer to place new matrix
4954: Level: intermediate
4956: Notes:
4957: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
4959: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
4961: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4963: When the original `mat` is the product of a matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4964: is duplicated; the internal data structures created for the reuse of previous matrix operations are not duplicated.
4965: Users should not use `MatDuplicate()` to create a new matrix `M` if `M` is intended to be reused as the product of a matrix operation.
4967: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4968: @*/
4969: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4970: {
4971: Mat B;
4972: VecType vtype;
4973: PetscInt i;
4974: PetscObject dm, container_h, container_d;
4975: void (*viewf)(void);
4977: PetscFunctionBegin;
4980: PetscAssertPointer(M, 3);
4981: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4982: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4983: MatCheckPreallocated(mat, 1);
4985: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4986: PetscUseTypeMethod(mat, duplicate, op, M);
4987: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4988: B = *M;
4990: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4991: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4992: PetscCall(MatGetVecType(mat, &vtype));
4993: PetscCall(MatSetVecType(B, vtype));
4995: B->stencil.dim = mat->stencil.dim;
4996: B->stencil.noc = mat->stencil.noc;
4997: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4998: B->stencil.dims[i] = mat->stencil.dims[i];
4999: B->stencil.starts[i] = mat->stencil.starts[i];
5000: }
5002: B->nooffproczerorows = mat->nooffproczerorows;
5003: B->nooffprocentries = mat->nooffprocentries;
5005: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
5006: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
5007: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
5008: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
5009: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
5010: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
5011: if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
5012: PetscCall(PetscObjectStateIncrease((PetscObject)B));
5013: PetscFunctionReturn(PETSC_SUCCESS);
5014: }
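/*
   A minimal usage sketch for MatDuplicate() (assumes A is assembled):

     Mat Acopy;
     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &Acopy));         // same pattern and values
     PetscCall(MatDestroy(&Acopy));
     PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &Acopy));  // same pattern, values zeroed
     PetscCall(MatDestroy(&Acopy));
*/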
5016: /*@
5017: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
5019: Logically Collective
5021: Input Parameter:
5022: . mat - the matrix
5024: Output Parameter:
5025: . v - the diagonal of the matrix
5027: Level: intermediate
5029: Note:
5030: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
5031: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
5032: is larger than `ndiag`, the values of the remaining entries are unspecified.
5034: Currently only correct in parallel for square matrices.
5036: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
5037: @*/
5038: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
5039: {
5040: PetscFunctionBegin;
5044: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5045: MatCheckPreallocated(mat, 1);
5046: if (PetscDefined(USE_DEBUG)) {
5047: PetscInt nv, row, col, ndiag;
5049: PetscCall(VecGetLocalSize(v, &nv));
5050: PetscCall(MatGetLocalSize(mat, &row, &col));
5051: ndiag = PetscMin(row, col);
5052: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5053: }
5055: PetscUseTypeMethod(mat, getdiagonal, v);
5056: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5057: PetscFunctionReturn(PETSC_SUCCESS);
5058: }
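/*
   A minimal usage sketch for MatGetDiagonal() (assumes A is an assembled square matrix);
   MatCreateVecs() supplies a vector with a compatible parallel layout.

     Vec diag;
     PetscCall(MatCreateVecs(A, NULL, &diag));
     PetscCall(MatGetDiagonal(A, diag));
     PetscCall(VecDestroy(&diag));
*/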
5060: /*@
5061: MatGetRowMin - Gets the minimum value (of the real part) of each
5062: row of the matrix
5064: Logically Collective
5066: Input Parameter:
5067: . mat - the matrix
5069: Output Parameters:
5070: + v - the vector for storing the minimums
5071: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5073: Level: intermediate
5075: Note:
5076: The results of this call are the same as if one converted the matrix to dense format
5077: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5079: This code is only implemented for a couple of matrix formats.
5081: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5082: `MatGetRowMax()`
5083: @*/
5084: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5085: {
5086: PetscFunctionBegin;
5090: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5092: if (!mat->cmap->N) {
5093: PetscCall(VecSet(v, PETSC_MAX_REAL));
5094: if (idx) {
5095: PetscInt i, m = mat->rmap->n;
5096: for (i = 0; i < m; i++) idx[i] = -1;
5097: }
5098: } else {
5099: MatCheckPreallocated(mat, 1);
5100: }
5101: PetscUseTypeMethod(mat, getrowmin, v, idx);
5102: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5103: PetscFunctionReturn(PETSC_SUCCESS);
5104: }
5106: /*@
5107: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5108: row of the matrix
5110: Logically Collective
5112: Input Parameter:
5113: . mat - the matrix
5115: Output Parameters:
5116: + v - the vector for storing the minimums
5117: - idx - the indices of the column found for each row (or `NULL` if not needed)
5119: Level: intermediate
5121: Notes:
5122: If a row is completely empty or has only 0.0 values, then the `idx` value for that
5123: row is 0 (the first column).
5125: This code is only implemented for a couple of matrix formats.
5127: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5128: @*/
5129: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5130: {
5131: PetscFunctionBegin;
5135: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5136: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5138: if (!mat->cmap->N) {
5139: PetscCall(VecSet(v, 0.0));
5140: if (idx) {
5141: PetscInt i, m = mat->rmap->n;
5142: for (i = 0; i < m; i++) idx[i] = -1;
5143: }
5144: } else {
5145: MatCheckPreallocated(mat, 1);
5146: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5147: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5148: }
5149: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5150: PetscFunctionReturn(PETSC_SUCCESS);
5151: }
5153: /*@
5154: MatGetRowMax - Gets the maximum value (of the real part) of each
5155: row of the matrix
5157: Logically Collective
5159: Input Parameter:
5160: . mat - the matrix
5162: Output Parameters:
5163: + v - the vector for storing the maximums
5164: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5166: Level: intermediate
5168: Notes:
5169: The results of this call are the same as if one converted the matrix to dense format
5170: and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
5172: This code is only implemented for a couple of matrix formats.
5174: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5175: @*/
5176: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5177: {
5178: PetscFunctionBegin;
5182: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5184: if (!mat->cmap->N) {
5185: PetscCall(VecSet(v, PETSC_MIN_REAL));
5186: if (idx) {
5187: PetscInt i, m = mat->rmap->n;
5188: for (i = 0; i < m; i++) idx[i] = -1;
5189: }
5190: } else {
5191: MatCheckPreallocated(mat, 1);
5192: PetscUseTypeMethod(mat, getrowmax, v, idx);
5193: }
5194: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5195: PetscFunctionReturn(PETSC_SUCCESS);
5196: }
5198: /*@
5199: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5200: row of the matrix
5202: Logically Collective
5204: Input Parameter:
5205: . mat - the matrix
5207: Output Parameters:
5208: + v - the vector for storing the maximums
5209: - idx - the indices of the column found for each row (or `NULL` if not needed)
5211: Level: intermediate
5213: Notes:
5214: If a row is completely empty or has only 0.0 values, then the `idx` value for that
5215: row is 0 (the first column).
5217: This code is only implemented for a couple of matrix formats.
5219: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5220: @*/
5221: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5222: {
5223: PetscFunctionBegin;
5227: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5229: if (!mat->cmap->N) {
5230: PetscCall(VecSet(v, 0.0));
5231: if (idx) {
5232: PetscInt i, m = mat->rmap->n;
5233: for (i = 0; i < m; i++) idx[i] = -1;
5234: }
5235: } else {
5236: MatCheckPreallocated(mat, 1);
5237: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5238: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5239: }
5240: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5241: PetscFunctionReturn(PETSC_SUCCESS);
5242: }
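/*
   A minimal usage sketch for MatGetRowMaxAbs() (assumes A is assembled; the idx array is
   optional and may be NULL):

     Vec       rowmax;
     PetscInt *idx, m;

     PetscCall(MatGetLocalSize(A, &m, NULL));
     PetscCall(MatCreateVecs(A, NULL, &rowmax));   // vector with A's row layout
     PetscCall(PetscMalloc1(m, &idx));
     PetscCall(MatGetRowMaxAbs(A, rowmax, idx));   // idx[i] = column of the row-i maximum
     PetscCall(PetscFree(idx));
     PetscCall(VecDestroy(&rowmax));
*/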
5244: /*@
5245: MatGetRowSumAbs - Gets the sum value (in absolute value) of each row of the matrix
5247: Logically Collective
5249: Input Parameter:
5250: . mat - the matrix
5252: Output Parameter:
5253: . v - the vector for storing the sum
5255: Level: intermediate
5257: Note:
5258: This code is only implemented for a couple of matrix formats.
5259: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5260: @*/
5261: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5262: {
5263: PetscFunctionBegin;
5267: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5269: if (!mat->cmap->N) {
5270: PetscCall(VecSet(v, 0.0));
5271: } else {
5272: MatCheckPreallocated(mat, 1);
5273: PetscUseTypeMethod(mat, getrowsumabs, v);
5274: }
5275: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5276: PetscFunctionReturn(PETSC_SUCCESS);
5277: }
5279: /*@
5280: MatGetRowSum - Gets the sum of each row of the matrix
5282: Logically or Neighborhood Collective
5284: Input Parameter:
5285: . mat - the matrix
5287: Output Parameter:
5288: . v - the vector for storing the sum of rows
5290: Level: intermediate
5292: Note:
5293: This code is slow since it is not currently specialized for different formats
5295: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5296: @*/
5297: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5298: {
5299: Vec ones;
5301: PetscFunctionBegin;
5305: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5306: MatCheckPreallocated(mat, 1);
5307: PetscCall(MatCreateVecs(mat, &ones, NULL));
5308: PetscCall(VecSet(ones, 1.));
5309: PetscCall(MatMult(mat, ones, v));
5310: PetscCall(VecDestroy(&ones));
5311: PetscFunctionReturn(PETSC_SUCCESS);
5312: }
5314: /*@
5315: MatTransposeSetPrecursor - Sets the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5316: when `B` was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5318: Collective
5320: Input Parameter:
5321: . mat - the matrix to provide the transpose
5323: Output Parameter:
5324: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5326: Level: advanced
5328: Note:
5329: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5330: routine allows bypassing that call.
5332: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5333: @*/
5334: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5335: {
5336: MatParentState *rb = NULL;
5338: PetscFunctionBegin;
5339: PetscCall(PetscNew(&rb));
5340: rb->id = ((PetscObject)mat)->id;
5341: rb->state = 0;
5342: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5343: PetscCall(PetscObjectContainerCompose((PetscObject)B, "MatTransposeParent", rb, PetscCtxDestroyDefault));
5344: PetscFunctionReturn(PETSC_SUCCESS);
5345: }
5347: /*@
5348: MatTranspose - Computes the transpose of a matrix, either in-place or out-of-place.
5350: Collective
5352: Input Parameters:
5353: + mat - the matrix to transpose
5354: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5356: Output Parameter:
5357: . B - the transpose of the matrix
5359: Level: intermediate
5361: Notes:
5362: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5364: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX` to store the transpose. If you already have a matrix to contain the
5365: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5367: If the nonzero structure of `mat` changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5369: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose but don't need the storage to be changed.
5370: For example, the result of `MatCreateTranspose()` will compute the transpose of the given matrix times a vector for matrix-vector products computed with `MatMult()`.
5372: If `mat` is unchanged from the last call this function returns immediately without recomputing the result
5374: If you only need the symbolic transpose of a matrix, and not the numerical values, use `MatTransposeSymbolic()`
5376: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5377: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5378: @*/
5379: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5380: {
5381: PetscContainer rB = NULL;
5382: MatParentState *rb = NULL;
5384: PetscFunctionBegin;
5387: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5388: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5389: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5390: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5391: MatCheckPreallocated(mat, 1);
5392: if (reuse == MAT_REUSE_MATRIX) {
5393: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5394: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5395: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5396: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5397: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5398: }
5400: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5401: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5402: PetscUseTypeMethod(mat, transpose, reuse, B);
5403: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5404: }
5405: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5407: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5408: if (reuse != MAT_INPLACE_MATRIX) {
5409: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5410: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5411: rb->state = ((PetscObject)mat)->state;
5412: rb->nonzerostate = mat->nonzerostate;
5413: }
5414: PetscFunctionReturn(PETSC_SUCCESS);
5415: }
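/*
   A minimal usage sketch for MatTranspose() (assumes A is assembled):

     Mat At;
     PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &At));  // create At = A^T
     // ... A's numerical values change, nonzero structure stays the same ...
     PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &At));    // refresh At numerically
     PetscCall(MatDestroy(&At));

   If At was created some other way but already has the transposed nonzero structure, call
   MatTransposeSetPrecursor(A, At) once before using MAT_REUSE_MATRIX.
*/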
5417: /*@
5418: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5420: Collective
5422: Input Parameter:
5423: . A - the matrix to transpose
5425: Output Parameter:
5426: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5427: numerical portion.
5429: Level: intermediate
5431: Note:
5432: This is not supported for many matrix types, use `MatTranspose()` in those cases
5434: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5435: @*/
5436: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5437: {
5438: PetscFunctionBegin;
5441: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5442: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5443: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5444: PetscUseTypeMethod(A, transposesymbolic, B);
5445: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5447: PetscCall(MatTransposeSetPrecursor(A, *B));
5448: PetscFunctionReturn(PETSC_SUCCESS);
5449: }
5451: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5452: {
5453: PetscContainer rB;
5454: MatParentState *rb;
5456: PetscFunctionBegin;
5459: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5460: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5461: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5462: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5463: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5464: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5465: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5466: PetscFunctionReturn(PETSC_SUCCESS);
5467: }
5469: /*@
5470: MatIsTranspose - Test whether a matrix is another one's transpose,
5471: or its own, in which case it tests symmetry.
5473: Collective
5475: Input Parameters:
5476: + A - the matrix to test
5477: . B - the matrix to test against, this can equal the first parameter
5478: - tol - tolerance, differences between entries smaller than this are counted as zero
5480: Output Parameter:
5481: . flg - the result
5483: Level: intermediate
5485: Notes:
5486: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5487: test involves parallel copies of the block off-diagonal parts of the matrix.
5489: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5490: @*/
5491: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5492: {
5493: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5495: PetscFunctionBegin;
5498: PetscAssertPointer(flg, 4);
5499: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5500: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5501: *flg = PETSC_FALSE;
5502: if (f && g) {
5503: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5504: PetscCall((*f)(A, B, tol, flg));
5505: } else {
5506: MatType mattype;
5508: PetscCall(MatGetType(f ? B : A, &mattype));
5509: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5510: }
5511: PetscFunctionReturn(PETSC_SUCCESS);
5512: }
5514: /*@
5515: MatHermitianTranspose - Computes the Hermitian (conjugate) transpose of a matrix, either in-place or out-of-place.
5517: Collective
5519: Input Parameters:
5520: + mat - the matrix to transpose and complex conjugate
5521: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5523: Output Parameter:
5524: . B - the Hermitian transpose
5526: Level: intermediate
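Example Usage:
A minimal sketch, assuming `mat` is an assembled matrix:
.vb
  Mat Bh;

  PetscCall(MatHermitianTranspose(mat, MAT_INITIAL_MATRIX, &Bh)); /* Bh is the conjugate transpose of mat */
  /* ... use Bh ... */
  PetscCall(MatDestroy(&Bh));
.ve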
5528: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5529: @*/
5530: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5531: {
5532: PetscFunctionBegin;
5533: PetscCall(MatTranspose(mat, reuse, B));
5534: #if defined(PETSC_USE_COMPLEX)
5535: PetscCall(MatConjugate(*B));
5536: #endif
5537: PetscFunctionReturn(PETSC_SUCCESS);
5538: }
5540: /*@
5541: MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose.
5543: Collective
5545: Input Parameters:
5546: + A - the matrix to test
5547: . B - the matrix to test against, this can equal the first parameter
5548: - tol - tolerance, differences between entries smaller than this are counted as zero
5550: Output Parameter:
5551: . flg - the result
5553: Level: intermediate
5555: Notes:
5556: Only available for `MATAIJ` matrices.
5558: The sequential algorithm
5559: has a running time of the order of the number of nonzeros; the parallel
5560: test involves parallel copies of the block off-diagonal parts of the matrix.
5562: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5563: @*/
5564: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5565: {
5566: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5568: PetscFunctionBegin;
5571: PetscAssertPointer(flg, 4);
5572: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5573: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5574: if (f && g) {
5575: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5576: PetscCall((*f)(A, B, tol, flg));
5577: }
5578: PetscFunctionReturn(PETSC_SUCCESS);
5579: }
5581: /*@
5582: MatPermute - Creates a new matrix with rows and columns permuted from the
5583: original.
5585: Collective
5587: Input Parameters:
5588: + mat - the matrix to permute
5589: . row - row permutation, each processor supplies only the permutation for its rows
5590: - col - column permutation, each processor supplies only the permutation for its columns
5592: Output Parameter:
5593: . B - the permuted matrix
5595: Level: advanced
5597: Note:
5598: The index sets map from row/col of permuted matrix to row/col of original matrix.
5599: The index sets should be on the same communicator as mat and have the same local sizes.
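Example Usage:
A minimal sketch, assuming `mat` is an assembled `MATSEQAIJ` matrix; the same ordering is used for rows and columns:
.vb
  IS  rperm, cperm;
  Mat Bperm;

  PetscCall(MatGetOrdering(mat, MATORDERINGRCM, &rperm, &cperm)); /* e.g. reverse Cuthill-McKee ordering */
  PetscCall(MatPermute(mat, rperm, cperm, &Bperm));
  PetscCall(ISDestroy(&rperm));
  PetscCall(ISDestroy(&cperm));
  /* ... use Bperm ... */
  PetscCall(MatDestroy(&Bperm));
.ve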
5601: Developer Note:
5602: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5603: exploit the fact that row and col are permutations, consider implementing the
5604: more general `MatCreateSubMatrix()` instead.
5606: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5607: @*/
5608: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5609: {
5610: PetscFunctionBegin;
5615: PetscAssertPointer(B, 4);
5616: PetscCheckSameComm(mat, 1, row, 2);
5617: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5618: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5619: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5620: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5621: MatCheckPreallocated(mat, 1);
5623: if (mat->ops->permute) {
5624: PetscUseTypeMethod(mat, permute, row, col, B);
5625: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5626: } else {
5627: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5628: }
5629: PetscFunctionReturn(PETSC_SUCCESS);
5630: }
5632: /*@
5633: MatEqual - Compares two matrices.
5635: Collective
5637: Input Parameters:
5638: + A - the first matrix
5639: - B - the second matrix
5641: Output Parameter:
5642: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5644: Level: intermediate
5646: Note:
5647: If either of the matrices is "matrix-free", meaning the matrix entries are not stored explicitly, then equality is determined by comparing the results of several matrix-vector products
5648: using several randomly created vectors, see `MatMultEqual()`.
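Example Usage:
A minimal sketch, assuming `A` and `B` are assembled matrices with the same global sizes:
.vb
  PetscBool eq;

  PetscCall(MatEqual(A, B, &eq));
  if (!eq) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "A and B differ\n"));
.ve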
5650: .seealso: [](ch_matrices), `Mat`, `MatMultEqual()`
5651: @*/
5652: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5653: {
5654: PetscFunctionBegin;
5659: PetscAssertPointer(flg, 3);
5660: PetscCheckSameComm(A, 1, B, 2);
5661: MatCheckPreallocated(A, 1);
5662: MatCheckPreallocated(B, 2);
5663: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5664: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5665: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5666: B->cmap->N);
5667: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5668: PetscUseTypeMethod(A, equal, B, flg);
5669: } else {
5670: PetscCall(MatMultEqual(A, B, 10, flg));
5671: }
5672: PetscFunctionReturn(PETSC_SUCCESS);
5673: }
5675: /*@
5676: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5677: matrices that are stored as vectors. Either of the two scaling
5678: matrices can be `NULL`.
5680: Collective
5682: Input Parameters:
5683: + mat - the matrix to be scaled
5684: . l - the left scaling vector (or `NULL`)
5685: - r - the right scaling vector (or `NULL`)
5687: Level: intermediate
5689: Note:
5690: `MatDiagonalScale()` computes $A = LAR$, where
5691: $L$ is a diagonal matrix (stored as a vector) that scales the rows of the matrix and
5692: $R$ is a diagonal matrix (stored as a vector) that scales the columns of the matrix.
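Example Usage:
A minimal sketch of symmetric diagonal scaling, assuming `mat` is an assembled square matrix with nonzero diagonal entries:
.vb
  Vec d;

  PetscCall(MatCreateVecs(mat, NULL, &d));
  PetscCall(MatGetDiagonal(mat, d));
  PetscCall(VecSqrtAbs(d));               /* d_i = sqrt(|a_ii|) */
  PetscCall(VecReciprocal(d));            /* d_i = 1/sqrt(|a_ii|) */
  PetscCall(MatDiagonalScale(mat, d, d)); /* mat = D^{-1/2} mat D^{-1/2} */
  PetscCall(VecDestroy(&d));
.ve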
5694: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5695: @*/
5696: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5697: {
5698: PetscFunctionBegin;
5701: if (l) {
5703: PetscCheckSameComm(mat, 1, l, 2);
5704: }
5705: if (r) {
5707: PetscCheckSameComm(mat, 1, r, 3);
5708: }
5709: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5710: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5711: MatCheckPreallocated(mat, 1);
5712: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5714: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5715: PetscUseTypeMethod(mat, diagonalscale, l, r);
5716: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5717: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5718: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5719: PetscFunctionReturn(PETSC_SUCCESS);
5720: }
5722: /*@
5723: MatScale - Scales all elements of a matrix by a given number.
5725: Logically Collective
5727: Input Parameters:
5728: + mat - the matrix to be scaled
5729: - a - the scaling value
5731: Level: intermediate
5733: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5734: @*/
5735: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5736: {
5737: PetscFunctionBegin;
5740: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5741: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5743: MatCheckPreallocated(mat, 1);
5745: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5746: if (a != (PetscScalar)1.0) {
5747: PetscUseTypeMethod(mat, scale, a);
5748: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5749: }
5750: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5751: PetscFunctionReturn(PETSC_SUCCESS);
5752: }
5754: /*@
5755: MatNorm - Calculates various norms of a matrix.
5757: Collective
5759: Input Parameters:
5760: + mat - the matrix
5761: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5763: Output Parameter:
5764: . nrm - the resulting norm
5766: Level: intermediate
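Example Usage:
A minimal sketch, assuming `mat` is an assembled matrix:
.vb
  PetscReal nrm;

  PetscCall(MatNorm(mat, NORM_FROBENIUS, &nrm));
  PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "||A||_F = %g\n", (double)nrm));
.ve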
5768: .seealso: [](ch_matrices), `Mat`
5769: @*/
5770: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5771: {
5772: PetscFunctionBegin;
5775: PetscAssertPointer(nrm, 3);
5777: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5778: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5779: MatCheckPreallocated(mat, 1);
5781: PetscUseTypeMethod(mat, norm, type, nrm);
5782: PetscFunctionReturn(PETSC_SUCCESS);
5783: }
5785: /*
5786: This variable is used to prevent counting of MatAssemblyBegin() calls that
5787: are made from within a MatAssemblyEnd().
5788: */
5789: static PetscInt MatAssemblyEnd_InUse = 0;
5790: /*@
5791: MatAssemblyBegin - Begins assembling the matrix. This routine should
5792: be called after completing all calls to `MatSetValues()`.
5794: Collective
5796: Input Parameters:
5797: + mat - the matrix
5798: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5800: Level: beginner
5802: Notes:
5803: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5804: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5806: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5807: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5808: using the matrix.
5810: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5811: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`; that is
5812: a global collective operation requiring all processes that share the matrix.
5814: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine are compressed
5815: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5816: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
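Example Usage:
A minimal sketch of the standard assembly loop, assuming `mat` has had its type, sizes, and preallocation set:
.vb
  PetscInt rstart, rend;

  PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
  for (PetscInt i = rstart; i < rend; i++) {
    PetscScalar v = 1.0;
    PetscCall(MatSetValues(mat, 1, &i, 1, &i, &v, INSERT_VALUES)); /* for example, set the diagonal */
  }
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  /* other local work may be done here while off-process values are communicated */
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve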
5818: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5819: @*/
5820: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5821: {
5822: PetscFunctionBegin;
5825: MatCheckPreallocated(mat, 1);
5826: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5827: if (mat->assembled) {
5828: mat->was_assembled = PETSC_TRUE;
5829: mat->assembled = PETSC_FALSE;
5830: }
5832: if (!MatAssemblyEnd_InUse) {
5833: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5834: PetscTryTypeMethod(mat, assemblybegin, type);
5835: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5836: } else PetscTryTypeMethod(mat, assemblybegin, type);
5837: PetscFunctionReturn(PETSC_SUCCESS);
5838: }
5840: /*@
5841: MatAssembled - Indicates if a matrix has been assembled and is ready for
5842: use; for example, in a matrix-vector product.
5844: Not Collective
5846: Input Parameter:
5847: . mat - the matrix
5849: Output Parameter:
5850: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5852: Level: advanced
5854: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5855: @*/
5856: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5857: {
5858: PetscFunctionBegin;
5860: PetscAssertPointer(assembled, 2);
5861: *assembled = mat->assembled;
5862: PetscFunctionReturn(PETSC_SUCCESS);
5863: }
5865: /*@
5866: MatAssemblyEnd - Completes assembling the matrix. This routine should
5867: be called after `MatAssemblyBegin()`.
5869: Collective
5871: Input Parameters:
5872: + mat - the matrix
5873: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5875: Options Database Keys:
5876: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5877: . -mat_view ::ascii_info_detail - Prints more detailed info
5878: . -mat_view - Prints matrix in ASCII format
5879: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5880: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5881: . -display <name> - Sets display name (default is host)
5882: . -draw_pause <sec> - Sets number of seconds to pause after display
5883: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5884: . -viewer_socket_machine <machine> - Machine to use for socket
5885: . -viewer_socket_port <port> - Port number to use for socket
5886: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5888: Level: beginner
5890: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5891: @*/
5892: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5893: {
5894: static PetscInt inassm = 0;
5895: PetscBool flg = PETSC_FALSE;
5897: PetscFunctionBegin;
5901: inassm++;
5902: MatAssemblyEnd_InUse++;
5903: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5904: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5905: PetscTryTypeMethod(mat, assemblyend, type);
5906: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5907: } else PetscTryTypeMethod(mat, assemblyend, type);
5909: /* Flush assembly is not a true assembly */
5910: if (type != MAT_FLUSH_ASSEMBLY) {
5911: if (mat->num_ass) {
5912: if (!mat->symmetry_eternal) {
5913: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5914: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5915: }
5916: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5917: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5918: }
5919: mat->num_ass++;
5920: mat->assembled = PETSC_TRUE;
5921: mat->ass_nonzerostate = mat->nonzerostate;
5922: }
5924: mat->insertmode = NOT_SET_VALUES;
5925: MatAssemblyEnd_InUse--;
5926: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5927: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5928: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5930: if (mat->checksymmetryonassembly) {
5931: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5932: if (flg) {
5933: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5934: } else {
5935: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5936: }
5937: }
5938: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5939: }
5940: inassm--;
5941: PetscFunctionReturn(PETSC_SUCCESS);
5942: }
5944: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5945: /*@
5946: MatSetOption - Sets a parameter option for a matrix. Some options
5947: may be specific to certain storage formats. Some options
5948: determine how values will be inserted (or added). Sorted,
5949: row-oriented input will generally assemble the fastest. The default
5950: is row-oriented.
5952: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5954: Input Parameters:
5955: + mat - the matrix
5956: . op - the option, one of those listed below (and possibly others),
5957: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5959: Options Describing Matrix Structure:
5960: + `MAT_SPD` - symmetric positive definite
5961: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5962: . `MAT_HERMITIAN` - transpose is the complex conjugation
5963: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5964: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5965: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5966: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5968: These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that it
5969: does not need to be computed (usually at a high cost)
5971: Options For Use with `MatSetValues()`:
5972: Insert a logically dense subblock, which can be
5973: . `MAT_ROW_ORIENTED` - row-oriented (default)
5975: These options reflect the data you pass in with `MatSetValues()`; they have
5976: nothing to do with how the data is stored internally in the matrix
5977: data structure.
5979: When (re)assembling a matrix, we can restrict the input for
5980: efficiency/debugging purposes. These options include
5981: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5982: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5983: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5984: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5985: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5986: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows; this will generate an error if
5987: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5988: performance for very large process counts.
5989: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5990: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5991: functions, instead sending only neighbor messages.
5993: Level: intermediate
5995: Notes:
5996: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
5998: Some options are relevant only for particular matrix types and
5999: are thus ignored by others. Other options are not supported by
6000: certain matrix types and will generate an error message if set.
6002: If using Fortran to compute a matrix, one may need to
6003: use the column-oriented option (or convert to the row-oriented
6004: format).
6006: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
6007: that would generate a new entry in the nonzero structure is instead
6008: ignored. Thus, if memory has not already been allocated for this particular
6009: data, then the insertion is ignored. For dense matrices, in which
6010: the entire array is allocated, no entries are ever ignored.
6011: Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one fewer global reduction
6013: `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6014: that would generate a new entry in the nonzero structure instead produces
6015: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one fewer global reduction
6017: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6018: that would generate a new entry that has not been preallocated will
6019: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
6020: only.) This is a useful flag when debugging matrix memory preallocation.
6021: If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one fewer global reduction
6023: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
6024: other processors should be dropped, rather than stashed.
6025: This is useful if you know that the "owning" processor is also
6026: always generating the correct matrix entries, so that PETSc need
6027: not transfer duplicate entries generated on another processor.
6029: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6030: searches during matrix assembly. When this flag is set, the hash table
6031: is created during the first matrix assembly. This hash table is
6032: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6033: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
6034: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
6035: supported by `MATMPIBAIJ` format only.
6037: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6038: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6040: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6041: a zero location in the matrix
6043: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6045: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6046: zero row routines and thus improves performance for very large process counts.
6048: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6049: part of the matrix (since they should match the upper triangular part).
6051: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6052: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6053: with finite difference schemes with non-periodic boundary conditions.
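Example Usage:
A minimal sketch, assuming `mat` has had its type and sizes set, is known to be symmetric positive definite, and has been preallocated exactly:
.vb
  PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
  PetscCall(MatSetOption(mat, MAT_SPD_ETERNAL, PETSC_TRUE));                /* the property persists through changes to the values */
  PetscCall(MatSetOption(mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE)); /* error on any insertion missed by the preallocation */
.ve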
6055: Developer Note:
6056: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6057: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6058: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6059: not changed.
6061: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6062: @*/
6063: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
6064: {
6065: PetscFunctionBegin;
6067: if (op > 0) {
6070: }
6072: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6074: switch (op) {
6075: case MAT_FORCE_DIAGONAL_ENTRIES:
6076: mat->force_diagonals = flg;
6077: PetscFunctionReturn(PETSC_SUCCESS);
6078: case MAT_NO_OFF_PROC_ENTRIES:
6079: mat->nooffprocentries = flg;
6080: PetscFunctionReturn(PETSC_SUCCESS);
6081: case MAT_SUBSET_OFF_PROC_ENTRIES:
6082: mat->assembly_subset = flg;
6083: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
6084: #if !defined(PETSC_HAVE_MPIUNI)
6085: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
6086: #endif
6087: mat->stash.first_assembly_done = PETSC_FALSE;
6088: }
6089: PetscFunctionReturn(PETSC_SUCCESS);
6090: case MAT_NO_OFF_PROC_ZERO_ROWS:
6091: mat->nooffproczerorows = flg;
6092: PetscFunctionReturn(PETSC_SUCCESS);
6093: case MAT_SPD:
6094: if (flg) {
6095: mat->spd = PETSC_BOOL3_TRUE;
6096: mat->symmetric = PETSC_BOOL3_TRUE;
6097: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6098: } else {
6099: mat->spd = PETSC_BOOL3_FALSE;
6100: }
6101: break;
6102: case MAT_SYMMETRIC:
6103: mat->symmetric = PetscBoolToBool3(flg);
6104: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6105: #if !defined(PETSC_USE_COMPLEX)
6106: mat->hermitian = PetscBoolToBool3(flg);
6107: #endif
6108: break;
6109: case MAT_HERMITIAN:
6110: mat->hermitian = PetscBoolToBool3(flg);
6111: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6112: #if !defined(PETSC_USE_COMPLEX)
6113: mat->symmetric = PetscBoolToBool3(flg);
6114: #endif
6115: break;
6116: case MAT_STRUCTURALLY_SYMMETRIC:
6117: mat->structurally_symmetric = PetscBoolToBool3(flg);
6118: break;
6119: case MAT_SYMMETRY_ETERNAL:
6120: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
6121: mat->symmetry_eternal = flg;
6122: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
6123: break;
6124: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6125: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
6126: mat->structural_symmetry_eternal = flg;
6127: break;
6128: case MAT_SPD_ETERNAL:
6129: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
6130: mat->spd_eternal = flg;
6131: if (flg) {
6132: mat->structural_symmetry_eternal = PETSC_TRUE;
6133: mat->symmetry_eternal = PETSC_TRUE;
6134: }
6135: break;
6136: case MAT_STRUCTURE_ONLY:
6137: mat->structure_only = flg;
6138: break;
6139: case MAT_SORTED_FULL:
6140: mat->sortedfull = flg;
6141: break;
6142: default:
6143: break;
6144: }
6145: PetscTryTypeMethod(mat, setoption, op, flg);
6146: PetscFunctionReturn(PETSC_SUCCESS);
6147: }
6149: /*@
6150: MatGetOption - Gets a parameter option that has been set for a matrix.
6152: Logically Collective
6154: Input Parameters:
6155: + mat - the matrix
6156: - op - the option, this only responds to certain options, check the code for which ones
6158: Output Parameter:
6159: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6161: Level: intermediate
6163: Notes:
6164: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6166: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6167: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6169: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6170: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6171: @*/
6172: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6173: {
6174: PetscFunctionBegin;
6178: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6179: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6181: switch (op) {
6182: case MAT_NO_OFF_PROC_ENTRIES:
6183: *flg = mat->nooffprocentries;
6184: break;
6185: case MAT_NO_OFF_PROC_ZERO_ROWS:
6186: *flg = mat->nooffproczerorows;
6187: break;
6188: case MAT_SYMMETRIC:
6189: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6190: break;
6191: case MAT_HERMITIAN:
6192: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6193: break;
6194: case MAT_STRUCTURALLY_SYMMETRIC:
6195: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6196: break;
6197: case MAT_SPD:
6198: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6199: break;
6200: case MAT_SYMMETRY_ETERNAL:
6201: *flg = mat->symmetry_eternal;
6202: break;
6203: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6204: *flg = mat->structural_symmetry_eternal;
6205: break;
6206: default:
6207: break;
6208: }
6209: PetscFunctionReturn(PETSC_SUCCESS);
6210: }
6212: /*@
6213: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6214: this routine retains the old nonzero structure.
6216: Logically Collective
6218: Input Parameter:
6219: . mat - the matrix
6221: Level: intermediate
6223: Note:
6224: If the matrix was not preallocated then a default, likely poor, preallocation will be set in the matrix, so this should be called after the preallocation phase.
6225: See the Performance chapter of the users manual for information on preallocating matrices.
6227: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6228: @*/
6229: PetscErrorCode MatZeroEntries(Mat mat)
6230: {
6231: PetscFunctionBegin;
6234: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6235: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6236: MatCheckPreallocated(mat, 1);
6238: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6239: PetscUseTypeMethod(mat, zeroentries);
6240: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6241: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6242: PetscFunctionReturn(PETSC_SUCCESS);
6243: }
6245: /*@
6246: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6247: of a set of rows and columns of a matrix.
6249: Collective
6251: Input Parameters:
6252: + mat - the matrix
6253: . numRows - the number of rows/columns to zero
6254: . rows - the global row indices
6255: . diag - value put in the diagonal of the eliminated rows
6256: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6257: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6259: Level: intermediate
6261: Notes:
6262: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6264: For each zeroed row, the value of the corresponding `b` is set to diag times the value of the corresponding `x`.
6265: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6267: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6268: Krylov method to take advantage of the known solution on the zeroed rows.
6270: For the parallel case, all processes that share the matrix (i.e.,
6271: those in the communicator used for matrix creation) MUST call this
6272: routine, regardless of whether any rows being zeroed are owned by
6273: them.
6275: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`, it merely zeros those entries in the matrix, but never
6276: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6277: missing.
6279: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6280: list only rows local to itself).
6282: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
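Example Usage:
A minimal sketch of eliminating Dirichlet boundary conditions, assuming `mat` and `b` are assembled, `x` holds the known boundary values, and the illustrative `bcrows`/`nbc` hold the global indices of the constrained rows:
.vb
  /* bcrows and nbc are illustrative: the global row indices to eliminate and how many there are */
  PetscCall(MatZeroRowsColumns(mat, nbc, bcrows, 1.0, x, b));
  /* the eliminated rows/columns now hold only a unit diagonal, and b has been adjusted so the
     solution of the modified system matches x on those rows */
.ve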
6284: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6285: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6286: @*/
6287: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6288: {
6289: PetscFunctionBegin;
6292: if (numRows) PetscAssertPointer(rows, 3);
6293: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6294: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6295: MatCheckPreallocated(mat, 1);
6297: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6298: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6299: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6300: PetscFunctionReturn(PETSC_SUCCESS);
6301: }
6303: /*@
6304: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6305: of a set of rows and columns of a matrix.
6307: Collective
6309: Input Parameters:
6310: + mat - the matrix
6311: . is - the rows to zero
6312: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6313: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6314: - b - optional vector of right-hand side, that will be adjusted by provided solution
6316: Level: intermediate
6318: Note:
6319: See `MatZeroRowsColumns()` for details on how this routine operates.
6321: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6322: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6323: @*/
6324: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6325: {
6326: PetscInt numRows;
6327: const PetscInt *rows;
6329: PetscFunctionBegin;
6334: PetscCall(ISGetLocalSize(is, &numRows));
6335: PetscCall(ISGetIndices(is, &rows));
6336: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6337: PetscCall(ISRestoreIndices(is, &rows));
6338: PetscFunctionReturn(PETSC_SUCCESS);
6339: }
6341: /*@
6342: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6343: of a set of rows of a matrix.
6345: Collective
6347: Input Parameters:
6348: + mat - the matrix
6349: . numRows - the number of rows to zero
6350: . rows - the global row indices
6351: . diag - value put in the diagonal of the zeroed rows
6352: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6353: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6355: Level: intermediate
6357: Notes:
6358: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6360: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6362: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6363: Krylov method to take advantage of the known solution on the zeroed rows.
6365: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem; `PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns
6366: from the matrix.
6368: Unlike `MatZeroRowsColumns()`, for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure from the eliminated rows of the matrix
6369: but does not release memory. Because of this removal, matrix-vector products with the adjusted matrix will be a bit faster. For the dense
6370: formats this does not alter the nonzero structure.
6372: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) has been set, the nonzero structure
6373: of the matrix is not changed; the values are
6374: merely zeroed.
6376: The user can set a value in the diagonal entry (or for the `MATAIJ`
6377: format can optionally remove the main diagonal entry from the
6378: nonzero structure as well, by passing 0.0 as the `diag` argument).
6380: For the parallel case, all processes that share the matrix (i.e.,
6381: those in the communicator used for matrix creation) MUST call this
6382: routine, regardless of whether any rows being zeroed are owned by
6383: them.
6385: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6386: list only rows local to itself).
6388: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6389: owns that are to be zeroed. This saves a global synchronization in the implementation.
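Example Usage:
A minimal sketch, assuming `mat` is assembled and the illustrative `myrows`/`nmine` list only rows owned by this process:
.vb
  /* myrows and nmine are illustrative: the locally owned global row indices to zero and how many there are */
  PetscCall(MatSetOption(mat, MAT_NO_OFF_PROC_ZERO_ROWS, PETSC_TRUE)); /* avoid a global synchronization */
  PetscCall(MatZeroRows(mat, nmine, myrows, 1.0, NULL, NULL));         /* place 1.0 on the diagonal of the zeroed rows */
.ve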
6391: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6392: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6393: @*/
6394: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6395: {
6396: PetscFunctionBegin;
6399: if (numRows) PetscAssertPointer(rows, 3);
6400: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6401: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6402: MatCheckPreallocated(mat, 1);
6404: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6405: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6406: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6407: PetscFunctionReturn(PETSC_SUCCESS);
6408: }
6410: /*@
6411: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6412: of a set of rows of a matrix indicated by an `IS`
6414: Collective
6416: Input Parameters:
6417: + mat - the matrix
6418: . is - index set, `IS`, of rows to remove (if `NULL` then no row is removed)
6419: . diag - value put in all diagonals of eliminated rows
6420: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6421: - b - optional vector of right-hand side, that will be adjusted by provided solution
6423: Level: intermediate
6425: Note:
6426: See `MatZeroRows()` for details on how this routine operates.
6428: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6429: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `IS`
6430: @*/
6431: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6432: {
6433: PetscInt numRows = 0;
6434: const PetscInt *rows = NULL;
6436: PetscFunctionBegin;
6439: if (is) {
6441: PetscCall(ISGetLocalSize(is, &numRows));
6442: PetscCall(ISGetIndices(is, &rows));
6443: }
6444: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6445: if (is) PetscCall(ISRestoreIndices(is, &rows));
6446: PetscFunctionReturn(PETSC_SUCCESS);
6447: }
6449: /*@
6450: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6451: of a set of rows of a matrix indicated by a `MatStencil`. These rows must be local to the process.
6453: Collective
6455: Input Parameters:
6456: + mat - the matrix
6457: . numRows - the number of rows to remove
6458: . rows - the grid coordinates (and component number when dof > 1) for matrix rows indicated by an array of `MatStencil`
6459: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6460: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6461: - b - optional vector of right-hand side, that will be adjusted by provided solution
6463: Level: intermediate
6465: Notes:
6466: See `MatZeroRows()` for details on how this routine operates.
6468: The grid coordinates are across the entire grid, not just the local portion
6470: For periodic boundary conditions use negative indices for values to the left (below 0), which are obtained by
6471: wrapping values from the right edge. For values to the right of the last entry, use that index plus one, etc.;
6472: these are obtained by wrapping values from the left edge. This does not work for anything but the
6473: `DM_BOUNDARY_PERIODIC` boundary type.
6475: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6476: a single value per point) you can skip filling those indices.
6478: Fortran Note:
6479: `idxm` and `idxn` should be declared as
6480: $ MatStencil idxm(4, m)
6481: and the values inserted using
6482: .vb
6483: idxm(MatStencil_i, 1) = i
6484: idxm(MatStencil_j, 1) = j
6485: idxm(MatStencil_k, 1) = k
6486: idxm(MatStencil_c, 1) = c
6487: etc
6488: .ve
6490: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6491: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6492: @*/
6493: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6494: {
6495: PetscInt dim = mat->stencil.dim;
6496: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6497: PetscInt *dims = mat->stencil.dims + 1;
6498: PetscInt *starts = mat->stencil.starts;
6499: PetscInt *dxm = (PetscInt *)rows;
6500: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6502: PetscFunctionBegin;
6505: if (numRows) PetscAssertPointer(rows, 3);
6507: PetscCall(PetscMalloc1(numRows, &jdxm));
6508: for (i = 0; i < numRows; ++i) {
6509: /* Skip unused dimensions (they are ordered k, j, i, c) */
6510: for (j = 0; j < 3 - sdim; ++j) dxm++;
6511: /* Local index in X dir */
6512: tmp = *dxm++ - starts[0];
6513: /* Loop over remaining dimensions */
6514: for (j = 0; j < dim - 1; ++j) {
6515: /* If nonlocal, set index to be negative */
6516: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6517: /* Update local index */
6518: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6519: }
6520: /* Skip component slot if necessary */
6521: if (mat->stencil.noc) dxm++;
6522: /* Local row number */
6523: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6524: }
6525: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6526: PetscCall(PetscFree(jdxm));
6527: PetscFunctionReturn(PETSC_SUCCESS);
6528: }
6530: /*@
6531: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6532: of a set of rows and columns of a matrix.
6534: Collective
6536: Input Parameters:
6537: + mat - the matrix
6538: . numRows - the number of rows/columns to remove
6539: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6540: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6541: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6542: - b - optional vector of right-hand side, that will be adjusted by provided solution
6544: Level: intermediate
6546: Notes:
6547: See `MatZeroRowsColumns()` for details on how this routine operates.
6549: The grid coordinates are across the entire grid, not just the local portion
6551: For periodic boundary conditions use negative indices for values to the left (below 0), which are obtained by
6552: wrapping values from the right edge. For values to the right of the last entry, use that index plus one, etc.;
6553: these are obtained by wrapping values from the left edge. This does not work for anything but the
6554: `DM_BOUNDARY_PERIODIC` boundary type.
6556: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6557: a single value per point) you can skip filling those indices.
6559: Fortran Note:
6560: `idxm` and `idxn` should be declared as
6561: $ MatStencil idxm(4, m)
6562: and the values inserted using
6563: .vb
6564: idxm(MatStencil_i, 1) = i
6565: idxm(MatStencil_j, 1) = j
6566: idxm(MatStencil_k, 1) = k
6567: idxm(MatStencil_c, 1) = c
6568: etc
6569: .ve
6571: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6572: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6573: @*/
6574: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6575: {
6576: PetscInt dim = mat->stencil.dim;
6577: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6578: PetscInt *dims = mat->stencil.dims + 1;
6579: PetscInt *starts = mat->stencil.starts;
6580: PetscInt *dxm = (PetscInt *)rows;
6581: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6583: PetscFunctionBegin;
6586: if (numRows) PetscAssertPointer(rows, 3);
6588: PetscCall(PetscMalloc1(numRows, &jdxm));
6589: for (i = 0; i < numRows; ++i) {
6590: /* Skip unused dimensions (they are ordered k, j, i, c) */
6591: for (j = 0; j < 3 - sdim; ++j) dxm++;
6592: /* Local index in X dir */
6593: tmp = *dxm++ - starts[0];
6594: /* Loop over remaining dimensions */
6595: for (j = 0; j < dim - 1; ++j) {
6596: /* If nonlocal, set index to be negative */
6597: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6598: /* Update local index */
6599: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6600: }
6601: /* Skip component slot if necessary */
6602: if (mat->stencil.noc) dxm++;
6603: /* Local row number */
6604: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6605: }
6606: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6607: PetscCall(PetscFree(jdxm));
6608: PetscFunctionReturn(PETSC_SUCCESS);
6609: }
6611: /*@
6612: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6613: of a set of rows of a matrix; using local numbering of rows.
6615: Collective
6617: Input Parameters:
6618: + mat - the matrix
6619: . numRows - the number of rows to remove
6620: . rows - the local row indices
6621: . diag - value put in all diagonals of eliminated rows
6622: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6623: - b - optional vector of right-hand side, that will be adjusted by provided solution
6625: Level: intermediate
6627: Notes:
6628: Before calling `MatZeroRowsLocal()`, the user must first set the
6629: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6631: See `MatZeroRows()` for details on how this routine operates.
6633: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6634: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6635: @*/
6636: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6637: {
6638: PetscFunctionBegin;
6641: if (numRows) PetscAssertPointer(rows, 3);
6642: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6643: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6644: MatCheckPreallocated(mat, 1);
6646: if (mat->ops->zerorowslocal) {
6647: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6648: } else {
6649: IS is, newis;
6650: const PetscInt *newRows;
6652: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6653: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6654: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6655: PetscCall(ISGetIndices(newis, &newRows));
6656: PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6657: PetscCall(ISRestoreIndices(newis, &newRows));
6658: PetscCall(ISDestroy(&newis));
6659: PetscCall(ISDestroy(&is));
6660: }
6661: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6662: PetscFunctionReturn(PETSC_SUCCESS);
6663: }
6665: /*@
6666: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6667: of a set of rows of a matrix; using local numbering of rows.
6669: Collective
6671: Input Parameters:
6672: + mat - the matrix
6673: . is - index set of rows to remove
6674: . diag - value put in all diagonals of eliminated rows
6675: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6676: - b - optional vector of right-hand side, that will be adjusted by provided solution
6678: Level: intermediate
6680: Notes:
6681: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6682: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6684: See `MatZeroRows()` for details on how this routine operates.
6686: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6687: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6688: @*/
6689: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6690: {
6691: PetscInt numRows;
6692: const PetscInt *rows;
6694: PetscFunctionBegin;
6698: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6699: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6700: MatCheckPreallocated(mat, 1);
6702: PetscCall(ISGetLocalSize(is, &numRows));
6703: PetscCall(ISGetIndices(is, &rows));
6704: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6705: PetscCall(ISRestoreIndices(is, &rows));
6706: PetscFunctionReturn(PETSC_SUCCESS);
6707: }
6709: /*@
6710: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6711: of a set of rows and columns of a matrix; using local numbering of rows.
6713: Collective
6715: Input Parameters:
6716: + mat - the matrix
6717: . numRows - the number of rows to remove
6718: . rows - the local row indices
6719: . diag - value put in all diagonals of eliminated rows
6720: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6721: - b - optional vector of right-hand side, that will be adjusted by provided solution
6723: Level: intermediate
6725: Notes:
6726: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6727: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6729: See `MatZeroRowsColumns()` for details on how this routine operates.
6731: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6732: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6733: @*/
6734: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6735: {
6736: IS is, newis;
6737: const PetscInt *newRows;
6739: PetscFunctionBegin;
6742: if (numRows) PetscAssertPointer(rows, 3);
6743: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6744: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6745: MatCheckPreallocated(mat, 1);
6747: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6748: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6749: PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6750: PetscCall(ISGetIndices(newis, &newRows));
6751: PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6752: PetscCall(ISRestoreIndices(newis, &newRows));
6753: PetscCall(ISDestroy(&newis));
6754: PetscCall(ISDestroy(&is));
6755: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6756: PetscFunctionReturn(PETSC_SUCCESS);
6757: }
6759: /*@
6760: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6761: of a set of rows and columns of a matrix; using local numbering of rows.
6763: Collective
6765: Input Parameters:
6766: + mat - the matrix
6767: . is - index set of rows to remove
6768: . diag - value put in all diagonals of eliminated rows
6769: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6770: - b - optional vector of right-hand side, that will be adjusted by provided solution
6772: Level: intermediate
6774: Notes:
6775: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6776: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6778: See `MatZeroRowsColumns()` for details on how this routine operates.
6780: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6781: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6782: @*/
6783: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6784: {
6785: PetscInt numRows;
6786: const PetscInt *rows;
6788: PetscFunctionBegin;
6792: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6793: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6794: MatCheckPreallocated(mat, 1);
6796: PetscCall(ISGetLocalSize(is, &numRows));
6797: PetscCall(ISGetIndices(is, &rows));
6798: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6799: PetscCall(ISRestoreIndices(is, &rows));
6800: PetscFunctionReturn(PETSC_SUCCESS);
6801: }
6803: /*@
6804: MatGetSize - Returns the numbers of rows and columns in a matrix.
6806: Not Collective
6808: Input Parameter:
6809: . mat - the matrix
6811: Output Parameters:
6812: + m - the number of global rows
6813: - n - the number of global columns
6815: Level: beginner
6817: Note:
6818: Both output parameters can be `NULL` on input.
6820: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6821: @*/
6822: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6823: {
6824: PetscFunctionBegin;
6826: if (m) *m = mat->rmap->N;
6827: if (n) *n = mat->cmap->N;
6828: PetscFunctionReturn(PETSC_SUCCESS);
6829: }
6831: /*@
6832: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6833: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6835: Not Collective
6837: Input Parameter:
6838: . mat - the matrix
6840: Output Parameters:
6841: + m - the number of local rows, use `NULL` to not obtain this value
6842: - n - the number of local columns, use `NULL` to not obtain this value
6844: Level: beginner
6846: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6847: @*/
6848: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6849: {
6850: PetscFunctionBegin;
6852: if (m) PetscAssertPointer(m, 2);
6853: if (n) PetscAssertPointer(n, 3);
6854: if (m) *m = mat->rmap->n;
6855: if (n) *n = mat->cmap->n;
6856: PetscFunctionReturn(PETSC_SUCCESS);
6857: }
6859: /*@
6860: MatGetOwnershipRangeColumn - Returns the range of matrix columns owned by this MPI process; these
6861: correspond to the rows of a vector one multiplies this matrix by.
6863: Not Collective, unless matrix has not been allocated, then collective
6865: Input Parameter:
6866: . mat - the matrix
6868: Output Parameters:
6869: + m - the global index of the first local column, use `NULL` to not obtain this value
6870: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6872: Level: developer
6874: Notes:
6875: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6877: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6878: If `PETSC_DECIDE` was passed as the local size, then the range is computed with default values using `PetscSplitOwnership()`.
6880: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6881: the local values in the matrix.
6883: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6884: Layouts](sec_matlayout) for details on matrix layouts.
6886: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6887: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6888: @*/
6889: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6890: {
6891: PetscFunctionBegin;
6894: if (m) PetscAssertPointer(m, 2);
6895: if (n) PetscAssertPointer(n, 3);
6896: MatCheckPreallocated(mat, 1);
6897: if (m) *m = mat->cmap->rstart;
6898: if (n) *n = mat->cmap->rend;
6899: PetscFunctionReturn(PETSC_SUCCESS);
6900: }
6902: /*@
6903: MatGetOwnershipRange - For matrices that own values by row, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6904: this MPI process.
6906: Not Collective
6908: Input Parameter:
6909: . mat - the matrix
6911: Output Parameters:
6912: + m - the global index of the first local row, use `NULL` to not obtain this value
6913: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6915: Level: beginner
6917: Notes:
6918: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6920: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6921: If `PETSC_DECIDE` was passed as the local size, then the range is computed with default values using `PetscSplitOwnership()`.
6923: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6924: the local values in the matrix.
6926: The high argument is one more than the last element stored locally.
6928: For all matrices it returns the range of matrix rows associated with rows of a vector that
6929: would contain the result of a matrix vector product with this matrix. See [Matrix
6930: Layouts](sec_matlayout) for details on matrix layouts.
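
   Example Usage (an illustrative sketch; assumes an assembled matrix `A`):
.vb
   PetscInt rstart, rend;
   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
   for (PetscInt row = rstart; row < rend; row++) {
     /* this MPI process owns global row `row`; set or read entries of that row here */
   }
.ve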
6932: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
6933: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6934: @*/
6935: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6936: {
6937: PetscFunctionBegin;
6940: if (m) PetscAssertPointer(m, 2);
6941: if (n) PetscAssertPointer(n, 3);
6942: MatCheckPreallocated(mat, 1);
6943: if (m) *m = mat->rmap->rstart;
6944: if (n) *n = mat->rmap->rend;
6945: PetscFunctionReturn(PETSC_SUCCESS);
6946: }
6948: /*@C
6949: MatGetOwnershipRanges - For matrices that own values by row, excluding `MATELEMENTAL` and
6950: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6952: Not Collective, unless matrix has not been allocated
6954: Input Parameter:
6955: . mat - the matrix
6957: Output Parameter:
6958: . ranges - start of each process's portion plus one more than the total length at the end, of length `size` + 1
6959: where `size` is the number of MPI processes used by `mat`
6961: Level: beginner
6963: Notes:
6964: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6966: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6967: If `PETSC_DECIDE` was passed as the local size, then the range is computed with default values using `PetscSplitOwnership()`.
6969: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6970: the local values in the matrix.
6972: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6973: would contain the result of a matrix vector product with this matrix. See [Matrix
6974: Layouts](sec_matlayout) for details on matrix layouts.
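
   Example Usage (an illustrative sketch; assumes `A` lives on communicator `comm`):
.vb
   const PetscInt *ranges;
   PetscMPIInt     size;
   PetscCall(MatGetOwnershipRanges(A, &ranges));
   PetscCallMPI(MPI_Comm_size(comm, &size));
   for (PetscMPIInt p = 0; p < size; p++) {
     /* rows [ranges[p], ranges[p+1]) are owned by MPI process p */
   }
.ve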
6976: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6977: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
6978: `DMDAGetGhostCorners()`, `DM`
6979: @*/
6980: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
6981: {
6982: PetscFunctionBegin;
6985: MatCheckPreallocated(mat, 1);
6986: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6987: PetscFunctionReturn(PETSC_SUCCESS);
6988: }
6990: /*@C
6991: MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with rows of a
6992: vector one multiplies this matrix by that are owned by each process.
6994: Not Collective, unless matrix has not been allocated
6996: Input Parameter:
6997: . mat - the matrix
6999: Output Parameter:
7000: . ranges - start of each process's portion plus one more than the total length at the end
7002: Level: beginner
7004: Notes:
7005: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7007: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7008: If `PETSC_DECIDE` was passed as the local size, then the range is computed with default values using `PetscSplitOwnership()`.
7010: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7011: the local values in the matrix.
7013: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
7014: Layouts](sec_matlayout) for details on matrix layouts.
7016: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
7017: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
7018: `DMDAGetGhostCorners()`, `DM`
7019: @*/
7020: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
7021: {
7022: PetscFunctionBegin;
7025: MatCheckPreallocated(mat, 1);
7026: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
7027: PetscFunctionReturn(PETSC_SUCCESS);
7028: }
7030: /*@
7031: MatGetOwnershipIS - Get the row and column ownership of a matrix's values as index sets.
7033: Not Collective
7035: Input Parameter:
7036: . A - matrix
7038: Output Parameters:
7039: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
7040: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
7042: Level: intermediate
7044: Note:
7045: You should call `ISDestroy()` on the returned `IS`.
7047: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
7048: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
7049: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
7050: details on matrix layouts.
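
   Example Usage (an illustrative sketch; `A` is any assembled matrix):
.vb
   IS rows, cols;
   PetscCall(MatGetOwnershipIS(A, &rows, &cols));
   PetscCall(ISView(rows, PETSC_VIEWER_STDOUT_SELF));
   PetscCall(ISDestroy(&rows));
   PetscCall(ISDestroy(&cols));
.ve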
7052: .seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
7053: @*/
7054: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
7055: {
7056: PetscErrorCode (*f)(Mat, IS *, IS *);
7058: PetscFunctionBegin;
7061: MatCheckPreallocated(A, 1);
7062: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
7063: if (f) {
7064: PetscCall((*f)(A, rows, cols));
7065: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
7066: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
7067: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
7068: }
7069: PetscFunctionReturn(PETSC_SUCCESS);
7070: }
7072: /*@
7073: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`.
7074: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
7075: to complete the factorization.
7077: Collective
7079: Input Parameters:
7080: + fact - the factorized matrix obtained with `MatGetFactor()`
7081: . mat - the matrix
7082: . row - row permutation
7083: . col - column permutation
7084: - info - structure containing
7085: .vb
7086: levels - number of levels of fill.
7087: expected fill - as ratio of original fill.
7088: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
7089: missing diagonal entries)
7090: .ve
7092: Level: developer
7094: Notes:
7095: See [Matrix Factorization](sec_matfactor) for additional information.
7097: Most users should employ the `KSP` interface for linear solvers
7098: instead of working directly with matrix algebra routines such as this.
7099: See, e.g., `KSPCreate()`.
7101: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
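
   Example Usage (an illustrative sketch of a typical call sequence; assumes an assembled `MATAIJ` matrix `A` and placeholder factorization parameters):
.vb
   Mat           F;
   IS            row, col;
   MatFactorInfo info;
   PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ILU, &F));
   PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &row, &col));
   PetscCall(MatFactorInfoInitialize(&info));
   info.levels = 1;   /* ILU(1) */
   info.fill   = 2.0; /* expected fill as a ratio of the original nonzeros */
   PetscCall(MatILUFactorSymbolic(F, A, row, col, &info));
   PetscCall(MatLUFactorNumeric(F, A, &info));
   /* F can now be used with MatSolve(); destroy row, col, and F when finished */
.ve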
7103: Developer Note:
7104: The Fortran interface is not autogenerated as the
7105: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7107: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
7108: `MatGetOrdering()`, `MatFactorInfo`
7109: @*/
7110: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7111: {
7112: PetscFunctionBegin;
7117: PetscAssertPointer(info, 5);
7118: PetscAssertPointer(fact, 1);
7119: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7120: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7121: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7122: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7123: MatCheckPreallocated(mat, 2);
7125: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7126: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7127: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7128: PetscFunctionReturn(PETSC_SUCCESS);
7129: }
7131: /*@
7132: MatICCFactorSymbolic - Performs symbolic incomplete
7133: Cholesky factorization for a symmetric matrix. Use
7134: `MatCholeskyFactorNumeric()` to complete the factorization.
7136: Collective
7138: Input Parameters:
7139: + fact - the factorized matrix obtained with `MatGetFactor()`
7140: . mat - the matrix to be factored
7141: . perm - row and column permutation
7142: - info - structure containing
7143: .vb
7144: levels - number of levels of fill.
7145: expected fill - as ratio of original fill.
7146: .ve
7148: Level: developer
7150: Notes:
7151: Most users should employ the `KSP` interface for linear solvers
7152: instead of working directly with matrix algebra routines such as this.
7153: See, e.g., `KSPCreate()`.
7155: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7157: Developer Note:
7158: The Fortran interface is not autogenerated as the
7159: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7161: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7162: @*/
7163: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7164: {
7165: PetscFunctionBegin;
7169: PetscAssertPointer(info, 4);
7170: PetscAssertPointer(fact, 1);
7171: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7172: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7173: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7174: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7175: MatCheckPreallocated(mat, 2);
7177: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7178: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7179: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7180: PetscFunctionReturn(PETSC_SUCCESS);
7181: }
7183: /*@C
7184: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7185: points to an array of valid matrices, they may be reused to store the new
7186: submatrices.
7188: Collective
7190: Input Parameters:
7191: + mat - the matrix
7192: . n - the number of submatrices to be extracted (on this process, may be zero)
7193: . irow - index set of rows to extract
7194: . icol - index set of columns to extract
7195: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7197: Output Parameter:
7198: . submat - the array of submatrices
7200: Level: advanced
7202: Notes:
7203: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7204: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7205: to extract a parallel submatrix.
7207: Some matrix types place restrictions on the row and column
7208: indices, such as that they be sorted or that they be equal to each other.
7210: The index sets may not have duplicate entries.
7212: When extracting submatrices from a parallel matrix, each processor can
7213: form a different submatrix by setting the rows and columns of its
7214: individual index sets according to the local submatrix desired.
7216: When finished using the submatrices, the user should destroy
7217: them with `MatDestroySubMatrices()`.
7219: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7220: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7222: This routine creates the matrices in submat; you should NOT create them before
7223: calling it. It also allocates the array of matrix pointers submat.
7225: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7226: request one row/column in a block, they must request all rows/columns that are in
7227: that block. For example, if the block size is 2 you cannot request just row 0 and
7228: column 0.
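
   Example Usage (an illustrative sketch; each MPI process extracts one sequential submatrix from index sets `isrow` and `iscol`, assumed already created):
.vb
   Mat *submats;
   PetscCall(MatCreateSubMatrices(A, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &submats));
   /* submats[0] is a sequential matrix living on PETSC_COMM_SELF */
   PetscCall(MatDestroySubMatrices(1, &submats));
.ve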
7230: Fortran Note:
7231: .vb
7232: Mat, pointer :: submat(:)
7233: .ve
7235: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7236: @*/
7237: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7238: {
7239: PetscInt i;
7240: PetscBool eq;
7242: PetscFunctionBegin;
7245: if (n) {
7246: PetscAssertPointer(irow, 3);
7248: PetscAssertPointer(icol, 4);
7250: }
7251: PetscAssertPointer(submat, 6);
7252: if (n && scall == MAT_REUSE_MATRIX) {
7253: PetscAssertPointer(*submat, 6);
7255: }
7256: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7257: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7258: MatCheckPreallocated(mat, 1);
7259: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7260: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7261: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7262: for (i = 0; i < n; i++) {
7263: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7264: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7265: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7266: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7267: if (mat->boundtocpu && mat->bindingpropagates) {
7268: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7269: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7270: }
7271: #endif
7272: }
7273: PetscFunctionReturn(PETSC_SUCCESS);
7274: }
7276: /*@C
7277: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of `mat` (by pairs of `IS` that may live on subcomms).
7279: Collective
7281: Input Parameters:
7282: + mat - the matrix
7283: . n - the number of submatrices to be extracted
7284: . irow - index set of rows to extract
7285: . icol - index set of columns to extract
7286: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7288: Output Parameter:
7289: . submat - the array of submatrices
7291: Level: advanced
7293: Note:
7294: This is used by `PCGASM`
7296: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7297: @*/
7298: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7299: {
7300: PetscInt i;
7301: PetscBool eq;
7303: PetscFunctionBegin;
7306: if (n) {
7307: PetscAssertPointer(irow, 3);
7309: PetscAssertPointer(icol, 4);
7311: }
7312: PetscAssertPointer(submat, 6);
7313: if (n && scall == MAT_REUSE_MATRIX) {
7314: PetscAssertPointer(*submat, 6);
7316: }
7317: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7318: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7319: MatCheckPreallocated(mat, 1);
7321: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7322: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7323: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7324: for (i = 0; i < n; i++) {
7325: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7326: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7327: }
7328: PetscFunctionReturn(PETSC_SUCCESS);
7329: }
7331: /*@C
7332: MatDestroyMatrices - Destroys an array of matrices
7334: Collective
7336: Input Parameters:
7337: + n - the number of local matrices
7338: - mat - the matrices (this is a pointer to the array of matrices)
7340: Level: advanced
7342: Notes:
7343: Frees not only the matrices, but also the array that contains the matrices
7345: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7347: Fortran Note:
7348: Does not free the `mat` array.
7350: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7351: @*/
7352: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7353: {
7354: PetscInt i;
7356: PetscFunctionBegin;
7357: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7358: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7359: PetscAssertPointer(mat, 2);
7361: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7363: /* memory is allocated even if n = 0 */
7364: PetscCall(PetscFree(*mat));
7365: PetscFunctionReturn(PETSC_SUCCESS);
7366: }
7368: /*@C
7369: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7371: Collective
7373: Input Parameters:
7374: + n - the number of local matrices
7375: - mat - the matrices (this is a pointer to the array of matrices, to match the calling sequence of `MatCreateSubMatrices()`)
7377: Level: advanced
7379: Note:
7380: Frees not only the matrices, but also the array that contains the matrices
7382: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7383: @*/
7384: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7385: {
7386: Mat mat0;
7388: PetscFunctionBegin;
7389: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7390: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7391: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7392: PetscAssertPointer(mat, 2);
7394: mat0 = (*mat)[0];
7395: if (mat0 && mat0->ops->destroysubmatrices) {
7396: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7397: } else {
7398: PetscCall(MatDestroyMatrices(n, mat));
7399: }
7400: PetscFunctionReturn(PETSC_SUCCESS);
7401: }
7403: /*@
7404: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7406: Collective
7408: Input Parameter:
7409: . mat - the matrix
7411: Output Parameter:
7412: . matstruct - the sequential matrix with the nonzero structure of `mat`
7414: Level: developer
7416: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7417: @*/
7418: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7419: {
7420: PetscFunctionBegin;
7422: PetscAssertPointer(matstruct, 2);
7425: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7426: MatCheckPreallocated(mat, 1);
7428: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7429: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7430: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7431: PetscFunctionReturn(PETSC_SUCCESS);
7432: }
7434: /*@C
7435: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7437: Collective
7439: Input Parameter:
7440: . mat - the matrix
7442: Level: advanced
7444: Note:
7445: This is not needed; one can just call `MatDestroy()`.
7447: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7448: @*/
7449: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7450: {
7451: PetscFunctionBegin;
7452: PetscAssertPointer(mat, 1);
7453: PetscCall(MatDestroy(mat));
7454: PetscFunctionReturn(PETSC_SUCCESS);
7455: }
7457: /*@
7458: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7459: replaces the index sets by larger ones that represent submatrices with
7460: additional overlap.
7462: Collective
7464: Input Parameters:
7465: + mat - the matrix
7466: . n - the number of index sets
7467: . is - the array of index sets (these index sets will be changed during the call)
7468: - ov - the additional overlap requested
7470: Options Database Key:
7471: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7473: Level: developer
7475: Note:
7476: The computed overlap preserves the matrix block sizes when the blocks are square.
7477: That is: if a matrix nonzero for a given block would increase the overlap, all columns associated with
7478: that block are included in the overlap regardless of whether each specific column would increase the overlap.
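
   Example Usage (an illustrative sketch in the style of what `PCASM` does internally; `is` is assumed to already hold this process's subdomain rows):
.vb
   Mat *submats;
   PetscCall(MatIncreaseOverlap(A, 1, &is, 2)); /* grow the subdomain by 2 levels of overlap */
   PetscCall(MatCreateSubMatrices(A, 1, &is, &is, MAT_INITIAL_MATRIX, &submats));
.ve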
7480: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7481: @*/
7482: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7483: {
7484: PetscInt i, bs, cbs;
7486: PetscFunctionBegin;
7490: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7491: if (n) {
7492: PetscAssertPointer(is, 3);
7494: }
7495: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7496: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7497: MatCheckPreallocated(mat, 1);
7499: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7500: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7501: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7502: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7503: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7504: if (bs == cbs) {
7505: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7506: }
7507: PetscFunctionReturn(PETSC_SUCCESS);
7508: }
7510: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7512: /*@
7513: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7514: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7515: additional overlap.
7517: Collective
7519: Input Parameters:
7520: + mat - the matrix
7521: . n - the number of index sets
7522: . is - the array of index sets (these index sets will be changed during the call)
7523: - ov - the additional overlap requested
7525: Options Database Key:
7526: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7528: Level: developer
7530: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7531: @*/
7532: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7533: {
7534: PetscInt i;
7536: PetscFunctionBegin;
7539: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7540: if (n) {
7541: PetscAssertPointer(is, 3);
7543: }
7544: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7545: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7546: MatCheckPreallocated(mat, 1);
7547: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7548: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7549: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7550: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7551: PetscFunctionReturn(PETSC_SUCCESS);
7552: }
7554: /*@
7555: MatGetBlockSize - Returns the matrix block size.
7557: Not Collective
7559: Input Parameter:
7560: . mat - the matrix
7562: Output Parameter:
7563: . bs - block size
7565: Level: intermediate
7567: Notes:
7568: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7570: If the block size has not been set yet this routine returns 1.
7572: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7573: @*/
7574: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7575: {
7576: PetscFunctionBegin;
7578: PetscAssertPointer(bs, 2);
7579: *bs = mat->rmap->bs;
7580: PetscFunctionReturn(PETSC_SUCCESS);
7581: }
7583: /*@
7584: MatGetBlockSizes - Returns the matrix block row and column sizes.
7586: Not Collective
7588: Input Parameter:
7589: . mat - the matrix
7591: Output Parameters:
7592: + rbs - row block size
7593: - cbs - column block size
7595: Level: intermediate
7597: Notes:
7598: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7599: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7601: If a block size has not been set yet this routine returns 1.
7603: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7604: @*/
7605: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7606: {
7607: PetscFunctionBegin;
7609: if (rbs) PetscAssertPointer(rbs, 2);
7610: if (cbs) PetscAssertPointer(cbs, 3);
7611: if (rbs) *rbs = mat->rmap->bs;
7612: if (cbs) *cbs = mat->cmap->bs;
7613: PetscFunctionReturn(PETSC_SUCCESS);
7614: }
7616: /*@
7617: MatSetBlockSize - Sets the matrix block size.
7619: Logically Collective
7621: Input Parameters:
7622: + mat - the matrix
7623: - bs - block size
7625: Level: intermediate
7627: Notes:
7628: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7629: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7631: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7632: is compatible with the matrix local sizes.
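
   Example Usage (an illustrative sketch of a 2x2 point-block `MATBAIJ` matrix; the sizes are placeholders and must be divisible by the block size):
.vb
   Mat A;
   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
   PetscCall(MatSetType(A, MATBAIJ));
   PetscCall(MatSetBlockSize(A, 2));
   PetscCall(MatSetUp(A));
.ve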
7634: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7635: @*/
7636: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7637: {
7638: PetscFunctionBegin;
7641: PetscCall(MatSetBlockSizes(mat, bs, bs));
7642: PetscFunctionReturn(PETSC_SUCCESS);
7643: }
7645: typedef struct {
7646: PetscInt n;
7647: IS *is;
7648: Mat *mat;
7649: PetscObjectState nonzerostate;
7650: Mat C;
7651: } EnvelopeData;
7653: static PetscErrorCode EnvelopeDataDestroy(void **ptr)
7654: {
7655: EnvelopeData *edata = (EnvelopeData *)*ptr;
7657: PetscFunctionBegin;
7658: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7659: PetscCall(PetscFree(edata->is));
7660: PetscCall(PetscFree(edata));
7661: PetscFunctionReturn(PETSC_SUCCESS);
7662: }
7664: /*@
7665: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal, this computes and stores
7666: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7668: Collective
7670: Input Parameter:
7671: . mat - the matrix
7673: Level: intermediate
7675: Notes:
7676: There can be zeros within the blocks
7678: The blocks can span multiple processes, including lying on more than two processes
7680: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7681: @*/
7682: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7683: {
7684: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7685: PetscInt *diag, *odiag, sc;
7686: VecScatter scatter;
7687: PetscScalar *seqv;
7688: const PetscScalar *parv;
7689: const PetscInt *ia, *ja;
7690: PetscBool set, flag, done;
7691: Mat AA = mat, A;
7692: MPI_Comm comm;
7693: PetscMPIInt rank, size, tag;
7694: MPI_Status status;
7695: PetscContainer container;
7696: EnvelopeData *edata;
7697: Vec seq, par;
7698: IS isglobal;
7700: PetscFunctionBegin;
7702: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7703: if (!set || !flag) {
7704: /* TODO: only needs nonzero structure of transpose */
7705: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7706: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7707: }
7708: PetscCall(MatAIJGetLocalMat(AA, &A));
7709: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7710: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7712: PetscCall(MatGetLocalSize(mat, &n, NULL));
7713: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7714: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7715: PetscCallMPI(MPI_Comm_size(comm, &size));
7716: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7718: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7720: if (rank > 0) {
7721: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7722: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7723: }
7724: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7725: for (i = 0; i < n; i++) {
7726: env = PetscMax(env, ja[ia[i + 1] - 1]);
7727: II = rstart + i;
7728: if (env == II) {
7729: starts[lblocks] = tbs;
7730: sizes[lblocks++] = 1 + II - tbs;
7731: tbs = 1 + II;
7732: }
7733: }
7734: if (rank < size - 1) {
7735: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7736: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7737: }
7739: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7740: if (!set || !flag) PetscCall(MatDestroy(&AA));
7741: PetscCall(MatDestroy(&A));
7743: PetscCall(PetscNew(&edata));
7744: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7745: edata->n = lblocks;
7746: /* create IS needed for extracting blocks from the original matrix */
7747: PetscCall(PetscMalloc1(lblocks, &edata->is));
7748: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7750: /* Create the resulting inverse matrix nonzero structure with preallocation information */
7751: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7752: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7753: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7754: PetscCall(MatSetType(edata->C, MATAIJ));
7756: /* Communicate the start and end of each row, from each block to the correct rank */
7757: /* TODO: Use PetscSF instead of VecScatter */
7758: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7759: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7760: PetscCall(VecGetArrayWrite(seq, &seqv));
7761: for (PetscInt i = 0; i < lblocks; i++) {
7762: for (PetscInt j = 0; j < sizes[i]; j++) {
7763: seqv[cnt] = starts[i];
7764: seqv[cnt + 1] = starts[i] + sizes[i];
7765: cnt += 2;
7766: }
7767: }
7768: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7769: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7770: sc -= cnt;
7771: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7772: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7773: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7774: PetscCall(ISDestroy(&isglobal));
7775: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7776: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7777: PetscCall(VecScatterDestroy(&scatter));
7778: PetscCall(VecDestroy(&seq));
7779: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7780: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7781: PetscCall(VecGetArrayRead(par, &parv));
7782: cnt = 0;
7783: PetscCall(MatGetSize(mat, NULL, &n));
7784: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7785: PetscInt start, end, d = 0, od = 0;
7787: start = (PetscInt)PetscRealPart(parv[cnt]);
7788: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7789: cnt += 2;
7791: if (start < cstart) {
7792: od += cstart - start + n - cend;
7793: d += cend - cstart;
7794: } else if (start < cend) {
7795: od += n - cend;
7796: d += cend - start;
7797: } else od += n - start;
7798: if (end <= cstart) {
7799: od -= cstart - end + n - cend;
7800: d -= cend - cstart;
7801: } else if (end < cend) {
7802: od -= n - cend;
7803: d -= cend - end;
7804: } else od -= n - end;
7806: odiag[i] = od;
7807: diag[i] = d;
7808: }
7809: PetscCall(VecRestoreArrayRead(par, &parv));
7810: PetscCall(VecDestroy(&par));
7811: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7812: PetscCall(PetscFree2(diag, odiag));
7813: PetscCall(PetscFree2(sizes, starts));
7815: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7816: PetscCall(PetscContainerSetPointer(container, edata));
7817: PetscCall(PetscContainerSetCtxDestroy(container, EnvelopeDataDestroy));
7818: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7819: PetscCall(PetscObjectDereference((PetscObject)container));
7820: PetscFunctionReturn(PETSC_SUCCESS);
7821: }
7823: /*@
7824: MatInvertVariableBlockEnvelope - Sets matrix `C` to be the inverted block diagonal of matrix `A`
7826: Collective
7828: Input Parameters:
7829: + A - the matrix
7830: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7832: Output Parameter:
7833: . C - matrix with inverted block diagonal of `A`
7835: Level: advanced
7837: Note:
7838: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
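
   Example Usage (a minimal sketch; `A` is assumed to have its nonzeros clustered in blocks along the diagonal):
.vb
   Mat C;
   PetscCall(MatInvertVariableBlockEnvelope(A, MAT_INITIAL_MATRIX, &C));
   /* later, after A's numerical values (but not its nonzero structure) change: */
   PetscCall(MatInvertVariableBlockEnvelope(A, MAT_REUSE_MATRIX, &C));
.ve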
7840: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7841: @*/
7842: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7843: {
7844: PetscContainer container;
7845: EnvelopeData *edata;
7846: PetscObjectState nonzerostate;
7848: PetscFunctionBegin;
7849: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7850: if (!container) {
7851: PetscCall(MatComputeVariableBlockEnvelope(A));
7852: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7853: }
7854: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7855: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7856: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7857: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7859: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7860: *C = edata->C;
7862: for (PetscInt i = 0; i < edata->n; i++) {
7863: Mat D;
7864: PetscScalar *dvalues;
7866: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7867: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7868: PetscCall(MatSeqDenseInvert(D));
7869: PetscCall(MatDenseGetArray(D, &dvalues));
7870: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7871: PetscCall(MatDestroy(&D));
7872: }
7873: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7874: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7875: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7876: PetscFunctionReturn(PETSC_SUCCESS);
7877: }
7879: /*@
7880: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7882: Not Collective
7884: Input Parameters:
7885: + mat - the matrix
7886: . nblocks - the number of blocks on this process, each block can only exist on a single process
7887: - bsizes - the block sizes
7889: Level: intermediate
7891: Notes:
7892: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7894: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
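
   Example Usage (an illustrative sketch; the block sizes are placeholders that must sum to the local number of rows of `A`):
.vb
   const PetscInt bsizes[] = {2, 3, 1}; /* three diagonal point-blocks on this process */
   PetscCall(MatSetVariableBlockSizes(A, 3, bsizes));
   /* typically followed by a solve with -pc_type vpbjacobi */
.ve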
7896: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7897: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7898: @*/
7899: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
7900: {
7901: PetscInt ncnt = 0, nlocal;
7903: PetscFunctionBegin;
7905: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7906: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
7907: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
7908: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7909: PetscCall(PetscFree(mat->bsizes));
7910: mat->nblocks = nblocks;
7911: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7912: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7913: PetscFunctionReturn(PETSC_SUCCESS);
7914: }
7916: /*@C
7917: MatGetVariableBlockSizes - Gets the diagonal point-blocks of the matrix, which need not be of the same size
7919: Not Collective; No Fortran Support
7921: Input Parameter:
7922: . mat - the matrix
7924: Output Parameters:
7925: + nblocks - the number of blocks on this process
7926: - bsizes - the block sizes
7928: Level: intermediate
7930: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7931: @*/
7932: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
7933: {
7934: PetscFunctionBegin;
7936: if (nblocks) *nblocks = mat->nblocks;
7937: if (bsizes) *bsizes = mat->bsizes;
7938: PetscFunctionReturn(PETSC_SUCCESS);
7939: }
7941: /*@
7942: MatSetBlockSizes - Sets the matrix block row and column sizes.
7944: Logically Collective
7946: Input Parameters:
7947: + mat - the matrix
7948: . rbs - row block size
7949: - cbs - column block size
7951: Level: intermediate
7953: Notes:
7954: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7955: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7956: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7958: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
7959: are compatible with the matrix local sizes.
7961: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
7963: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7964: @*/
7965: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
7966: {
7967: PetscFunctionBegin;
7971: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
7972: if (mat->rmap->refcnt) {
7973: ISLocalToGlobalMapping l2g = NULL;
7974: PetscLayout nmap = NULL;
7976: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
7977: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
7978: PetscCall(PetscLayoutDestroy(&mat->rmap));
7979: mat->rmap = nmap;
7980: mat->rmap->mapping = l2g;
7981: }
7982: if (mat->cmap->refcnt) {
7983: ISLocalToGlobalMapping l2g = NULL;
7984: PetscLayout nmap = NULL;
7986: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
7987: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
7988: PetscCall(PetscLayoutDestroy(&mat->cmap));
7989: mat->cmap = nmap;
7990: mat->cmap->mapping = l2g;
7991: }
7992: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
7993: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
7994: PetscFunctionReturn(PETSC_SUCCESS);
7995: }
7997: /*@
7998: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
8000: Logically Collective
8002: Input Parameters:
8003: + mat - the matrix
8004: . fromRow - matrix from which to copy row block size
8005: - fromCol - matrix from which to copy column block size (can be same as fromRow)
8007: Level: developer
8009: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
8010: @*/
8011: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
8012: {
8013: PetscFunctionBegin;
8017: PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
8018: PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
8019: PetscFunctionReturn(PETSC_SUCCESS);
8020: }
8022: /*@
8023: MatResidual - Default routine to calculate the residual r = b - Ax
8025: Collective
8027: Input Parameters:
8028: + mat - the matrix
8029: . b - the right-hand-side
8030: - x - the approximate solution
8032: Output Parameter:
8033: . r - location to store the residual
8035: Level: developer
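
   Example Usage (an illustrative sketch; assumes vectors `b`, `x`, and `r` are compatible with `A`):
.vb
   PetscCall(MatResidual(A, b, x, r)); /* r = b - A*x */
   /* equivalent to the generic path used when the matrix type provides no specialized residual: */
   PetscCall(MatMult(A, x, r));
   PetscCall(VecAYPX(r, -1.0, b));
.ve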
8037: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8038: @*/
8039: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8040: {
8041: PetscFunctionBegin;
8047: MatCheckPreallocated(mat, 1);
8048: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8049: if (!mat->ops->residual) {
8050: PetscCall(MatMult(mat, x, r));
8051: PetscCall(VecAYPX(r, -1.0, b));
8052: } else {
8053: PetscUseTypeMethod(mat, residual, b, x, r);
8054: }
8055: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8056: PetscFunctionReturn(PETSC_SUCCESS);
8057: }
8059: /*@C
8060: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8062: Collective
8064: Input Parameters:
8065: + mat - the matrix
8066: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8067: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8068: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8069: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8070: always used.
8072: Output Parameters:
8073: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8074: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8075: . ja - the column indices, use `NULL` if not needed
8076: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8077: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8079: Level: developer
8081: Notes:
8082: You CANNOT change any of the ia[] or ja[] values.
8084: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
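
   Example Usage (an illustrative C sketch; assumes a sequential `MATAIJ` matrix `A`):
.vb
   PetscInt        n;
   const PetscInt *ia, *ja;
   PetscBool       done;
   PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
   if (done) {
     for (PetscInt i = 0; i < n; i++) {
       /* the column indices of row i are ja[ia[i]] .. ja[ia[i+1]-1] */
     }
   }
   PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
.ve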
8086: Fortran Notes:
8087: Use
8088: .vb
8089: PetscInt, pointer :: ia(:),ja(:)
8090: call MatGetRowIJ(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8091: ! Access the ith and jth entries via ia(i) and ja(j)
8092: .ve
8094: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8095: @*/
8096: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8097: {
8098: PetscFunctionBegin;
8101: if (n) PetscAssertPointer(n, 5);
8102: if (ia) PetscAssertPointer(ia, 6);
8103: if (ja) PetscAssertPointer(ja, 7);
8104: if (done) PetscAssertPointer(done, 8);
8105: MatCheckPreallocated(mat, 1);
8106: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8107: else {
8108: if (done) *done = PETSC_TRUE;
8109: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8110: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8111: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8112: }
8113: PetscFunctionReturn(PETSC_SUCCESS);
8114: }
8116: /*@C
8117: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8119: Collective
8121: Input Parameters:
8122: + mat - the matrix
8123: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8124: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8125: symmetrized
8126: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8127: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8128: always used.
8129: . n - number of columns in the (possibly compressed) matrix
8130: . ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
8131: - ja - the row indices
8133: Output Parameter:
8134: . done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8136: Level: developer
8138: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8139: @*/
8140: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8141: {
8142: PetscFunctionBegin;
8145: PetscAssertPointer(n, 5);
8146: if (ia) PetscAssertPointer(ia, 6);
8147: if (ja) PetscAssertPointer(ja, 7);
8148: PetscAssertPointer(done, 8);
8149: MatCheckPreallocated(mat, 1);
8150: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8151: else {
8152: *done = PETSC_TRUE;
8153: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8154: }
8155: PetscFunctionReturn(PETSC_SUCCESS);
8156: }
8158: /*@C
8159: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8161: Collective
8163: Input Parameters:
8164: + mat - the matrix
8165: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8166: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8167: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8168: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8169: always used.
8170: . n - size of (possibly compressed) matrix
8171: . ia - the row pointers
8172: - ja - the column indices
8174: Output Parameter:
8175: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8177: Level: developer
8179: Note:
8180: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8181: use of the array after it has been restored. If you pass `NULL`, it will
8182: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8184: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8185: @*/
8186: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8187: {
8188: PetscFunctionBegin;
8191: if (ia) PetscAssertPointer(ia, 6);
8192: if (ja) PetscAssertPointer(ja, 7);
8193: if (done) PetscAssertPointer(done, 8);
8194: MatCheckPreallocated(mat, 1);
8196: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8197: else {
8198: if (done) *done = PETSC_TRUE;
8199: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8200: if (n) *n = 0;
8201: if (ia) *ia = NULL;
8202: if (ja) *ja = NULL;
8203: }
8204: PetscFunctionReturn(PETSC_SUCCESS);
8205: }
8207: /*@C
8208: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8210: Collective
8212: Input Parameters:
8213: + mat - the matrix
8214: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8215: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8216: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8217: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8218: always used.
8220: Output Parameters:
8221: + n - size of (possibly compressed) matrix
8222: . ia - the column pointers
8223: . ja - the row indices
8224: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8226: Level: developer
8228: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8229: @*/
8230: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8231: {
8232: PetscFunctionBegin;
8235: if (ia) PetscAssertPointer(ia, 6);
8236: if (ja) PetscAssertPointer(ja, 7);
8237: PetscAssertPointer(done, 8);
8238: MatCheckPreallocated(mat, 1);
8240: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8241: else {
8242: *done = PETSC_TRUE;
8243: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8244: if (n) *n = 0;
8245: if (ia) *ia = NULL;
8246: if (ja) *ja = NULL;
8247: }
8248: PetscFunctionReturn(PETSC_SUCCESS);
8249: }
8251: /*@
8252: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8253: `MatGetColumnIJ()`.
8255: Collective
8257: Input Parameters:
8258: + mat - the matrix
8259: . ncolors - maximum color value
8260: . n - number of entries in colorarray
8261: - colorarray - array indicating color for each column
8263: Output Parameter:
8264: . iscoloring - coloring generated using colorarray information
8266: Level: developer
8268: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8269: @*/
8270: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8271: {
8272: PetscFunctionBegin;
8275: PetscAssertPointer(colorarray, 4);
8276: PetscAssertPointer(iscoloring, 5);
8277: MatCheckPreallocated(mat, 1);
8279: if (!mat->ops->coloringpatch) {
8280: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8281: } else {
8282: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8283: }
8284: PetscFunctionReturn(PETSC_SUCCESS);
8285: }
8287: /*@
8288: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8290: Logically Collective
8292: Input Parameter:
8293: . mat - the factored matrix to be reset
8295: Level: developer
8297: Notes:
8298: This routine should be used only with factored matrices formed by in-place
8299: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8300: format). This option can save memory, for example, when solving nonlinear
8301: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8302: ILU(0) preconditioner.
8304: One can specify in-place ILU(0) factorization by calling
8305: .vb
8306: PCSetType(pc, PCILU);
8307: PCFactorSetUseInPlace(pc, PETSC_TRUE);
8308: .ve
8309: or by using the options -pc_type ilu -pc_factor_in_place
8311: In-place factorization ILU(0) can also be used as a local
8312: solver for the blocks within the block Jacobi or additive Schwarz
8313: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8314: for details on setting local solver options.
8316: Most users should employ the `KSP` interface for linear solvers
8317: instead of working directly with matrix algebra routines such as this.
8318: See, e.g., `KSPCreate()`.
8320: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8321: @*/
8322: PetscErrorCode MatSetUnfactored(Mat mat)
8323: {
8324: PetscFunctionBegin;
8327: MatCheckPreallocated(mat, 1);
8328: mat->factortype = MAT_FACTOR_NONE;
8329: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8330: PetscUseTypeMethod(mat, setunfactored);
8331: PetscFunctionReturn(PETSC_SUCCESS);
8332: }
8334: /*@
8335: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8336: as the original matrix.
8338: Collective
8340: Input Parameters:
8341: + mat - the original matrix
8342: . isrow - parallel `IS` containing the rows this processor should obtain
8343: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" in the new matrix.
8344: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8346: Output Parameter:
8347: . newmat - the new submatrix, of the same type as the original matrix
8349: Level: advanced
8351: Notes:
8352:   The submatrix can be multiplied with vectors that use the same layout as `iscol`.
8354: Some matrix types place restrictions on the row and column indices, such
8355: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8356: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8358: The index sets may not have duplicate entries.
8360:   The first time this is called you should use a `cll` of `MAT_INITIAL_MATRIX`;
8361:   the `MatCreateSubMatrix()` routine will create `newmat` for you. Any additional calls
8362:   to this routine with a `mat` of the same nonzero structure and a `cll` of `MAT_REUSE_MATRIX`
8363:   will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8364:   you are finished using it.
8366: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8367: the input matrix.
8369: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8371: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8372: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8374: Example usage:
8375: Consider the following 8x8 matrix with 34 non-zero values, that is
8376: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8377: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8378: as follows
8379: .vb
8380: 1 2 0 | 0 3 0 | 0 4
8381: Proc0 0 5 6 | 7 0 0 | 8 0
8382: 9 0 10 | 11 0 0 | 12 0
8383: -------------------------------------
8384: 13 0 14 | 15 16 17 | 0 0
8385: Proc1 0 18 0 | 19 20 21 | 0 0
8386: 0 0 0 | 22 23 0 | 24 0
8387: -------------------------------------
8388: Proc2 25 26 27 | 0 0 28 | 29 0
8389: 30 0 0 | 31 32 33 | 0 34
8390: .ve
8392: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8394: .vb
8395: 2 0 | 0 3 0 | 0
8396: Proc0 5 6 | 7 0 0 | 8
8397: -------------------------------
8398: Proc1 18 0 | 19 20 21 | 0
8399: -------------------------------
8400: Proc2 26 27 | 0 0 28 | 29
8401: 0 0 | 31 32 33 | 0
8402: .ve
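  A corresponding call sequence might look as follows (an illustrative sketch, not a tested example; comm, nrows_local, rows_local, ncols_local, and cols_local are placeholders for the communicator of `mat` and the locally owned index data shown above):
.vb
   IS  isrow, iscol;
   Mat S;
   ISCreateGeneral(comm, nrows_local, rows_local, PETSC_COPY_VALUES, &isrow);
   ISCreateGeneral(comm, ncols_local, cols_local, PETSC_COPY_VALUES, &iscol);
   MatCreateSubMatrix(mat, isrow, iscol, MAT_INITIAL_MATRIX, &S);
   /* ... use S ... */
   MatDestroy(&S);
   ISDestroy(&isrow);
   ISDestroy(&iscol);
.ve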
8404: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8405: @*/
8406: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8407: {
8408: PetscMPIInt size;
8409: Mat *local;
8410: IS iscoltmp;
8411: PetscBool flg;
8413: PetscFunctionBegin;
8417: PetscAssertPointer(newmat, 5);
8420: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8421: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8423: MatCheckPreallocated(mat, 1);
8424: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8426: if (!iscol || isrow == iscol) {
8427: PetscBool stride;
8428: PetscMPIInt grabentirematrix = 0, grab;
8429: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8430: if (stride) {
8431: PetscInt first, step, n, rstart, rend;
8432: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8433: if (step == 1) {
8434: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8435: if (rstart == first) {
8436: PetscCall(ISGetLocalSize(isrow, &n));
8437: if (n == rend - rstart) grabentirematrix = 1;
8438: }
8439: }
8440: }
8441: PetscCallMPI(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8442: if (grab) {
8443: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8444: if (cll == MAT_INITIAL_MATRIX) {
8445: *newmat = mat;
8446: PetscCall(PetscObjectReference((PetscObject)mat));
8447: }
8448: PetscFunctionReturn(PETSC_SUCCESS);
8449: }
8450: }
8452: if (!iscol) {
8453: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8454: } else {
8455: iscoltmp = iscol;
8456: }
8458: /* if original matrix is on just one processor then use submatrix generated */
8459: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8460: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8461: goto setproperties;
8462: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8463: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8464: *newmat = *local;
8465: PetscCall(PetscFree(local));
8466: goto setproperties;
8467: } else if (!mat->ops->createsubmatrix) {
8468: /* Create a new matrix type that implements the operation using the full matrix */
8469: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8470: switch (cll) {
8471: case MAT_INITIAL_MATRIX:
8472: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8473: break;
8474: case MAT_REUSE_MATRIX:
8475: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8476: break;
8477: default:
8478: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8479: }
8480: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8481: goto setproperties;
8482: }
8484: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8485: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8486: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8488: setproperties:
8489: if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
8490: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8491: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8492: }
8493: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8494: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8495: PetscFunctionReturn(PETSC_SUCCESS);
8496: }
8498: /*@
8499: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8501: Not Collective
8503: Input Parameters:
8504: + A - the matrix we wish to propagate options from
8505: - B - the matrix we wish to propagate options to
8507: Level: beginner
8509: Note:
8510:   Propagates the options associated with `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8512: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8513: @*/
8514: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8515: {
8516: PetscFunctionBegin;
8519: B->symmetry_eternal = A->symmetry_eternal;
8520: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8521: B->symmetric = A->symmetric;
8522: B->structurally_symmetric = A->structurally_symmetric;
8523: B->spd = A->spd;
8524: B->hermitian = A->hermitian;
8525: PetscFunctionReturn(PETSC_SUCCESS);
8526: }
8528: /*@
8529: MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8530: used during the assembly process to store values that belong to
8531: other processors.
8533: Not Collective
8535: Input Parameters:
8536: + mat - the matrix
8537: . size - the initial size of the stash.
8538: - bsize - the initial size of the block-stash (if used).
8540: Options Database Keys:
8541: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8542: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8544: Level: intermediate
8546: Notes:
8547: The block-stash is used for values set with `MatSetValuesBlocked()` while
8548: the stash is used for values set with `MatSetValues()`
8550:   Run with the option -info and look for output of the form
8551:   MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8552:   to determine the appropriate value, MM, to use for `size`, and
8553:   MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8554:   to determine the value, BMM, to use for `bsize`
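  For example, if the -info output reported 10000 stash entries and 1000 block-stash entries (hypothetical numbers used only for illustration), one could call
.vb
   MatStashSetInitialSize(mat, 10000, 1000);
.ve
  before setting values in the next assembly.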
8556: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8557: @*/
8558: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8559: {
8560: PetscFunctionBegin;
8563: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8564: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8565: PetscFunctionReturn(PETSC_SUCCESS);
8566: }
8568: /*@
8569:   MatInterpolateAdd - $w = y + A*x$ or $w = y + A^T*x$, depending on the shape of
8570:   the matrix
8572: Neighbor-wise Collective
8574: Input Parameters:
8575: + A - the matrix
8576: . x - the vector to be multiplied by the interpolation operator
8577: - y - the vector to be added to the result
8579: Output Parameter:
8580: . w - the resulting vector
8582: Level: intermediate
8584: Notes:
8585: `w` may be the same vector as `y`.
8587: This allows one to use either the restriction or interpolation (its transpose)
8588: matrix to do the interpolation
8590: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8591: @*/
8592: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8593: {
8594: PetscInt M, N, Ny;
8596: PetscFunctionBegin;
8601: PetscCall(MatGetSize(A, &M, &N));
8602: PetscCall(VecGetSize(y, &Ny));
8603: if (M == Ny) {
8604: PetscCall(MatMultAdd(A, x, y, w));
8605: } else {
8606: PetscCall(MatMultTransposeAdd(A, x, y, w));
8607: }
8608: PetscFunctionReturn(PETSC_SUCCESS);
8609: }
8611: /*@
8612: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8613: the matrix
8615: Neighbor-wise Collective
8617: Input Parameters:
8618: + A - the matrix
8619: - x - the vector to be interpolated
8621: Output Parameter:
8622: . y - the resulting vector
8624: Level: intermediate
8626: Note:
8627: This allows one to use either the restriction or interpolation (its transpose)
8628: matrix to do the interpolation
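  For example, if R is a restriction matrix of size ncoarse x nfine and P an nfine x ncoarse interpolation (a sketch; R, P, xc, and xf are placeholder names for the operators and the coarse/fine vectors):
.vb
   MatInterpolate(R, xc, xf);   /* xf has the fine size, which differs from the row size of R, so R^T * xc is computed */
   MatInterpolate(P, xc, xf);   /* xf matches the row size of P, so P * xc is computed */
.ve
  In both cases the operation is chosen by comparing the global row size of the matrix with the size of the output vector.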
8630: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8631: @*/
8632: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8633: {
8634: PetscInt M, N, Ny;
8636: PetscFunctionBegin;
8640: PetscCall(MatGetSize(A, &M, &N));
8641: PetscCall(VecGetSize(y, &Ny));
8642: if (M == Ny) {
8643: PetscCall(MatMult(A, x, y));
8644: } else {
8645: PetscCall(MatMultTranspose(A, x, y));
8646: }
8647: PetscFunctionReturn(PETSC_SUCCESS);
8648: }
8650: /*@
8651: MatRestrict - $y = A*x$ or $A^T*x$
8653: Neighbor-wise Collective
8655: Input Parameters:
8656: + A - the matrix
8657: - x - the vector to be restricted
8659: Output Parameter:
8660: . y - the resulting vector
8662: Level: intermediate
8664: Note:
8665: This allows one to use either the restriction or interpolation (its transpose)
8666: matrix to do the restriction
8668: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8669: @*/
8670: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8671: {
8672: PetscInt M, N, Nx;
8674: PetscFunctionBegin;
8678: PetscCall(MatGetSize(A, &M, &N));
8679: PetscCall(VecGetSize(x, &Nx));
8680: if (M == Nx) {
8681: PetscCall(MatMultTranspose(A, x, y));
8682: } else {
8683: PetscCall(MatMult(A, x, y));
8684: }
8685: PetscFunctionReturn(PETSC_SUCCESS);
8686: }
8688: /*@
8689: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8691: Neighbor-wise Collective
8693: Input Parameters:
8694: + A - the matrix
8695: . x - the input dense matrix to be multiplied
8696: - w - the input dense matrix to be added to the result
8698: Output Parameter:
8699: . y - the output dense matrix
8701: Level: intermediate
8703: Note:
8704: This allows one to use either the restriction or interpolation (its transpose)
8705:   matrix to do the interpolation. The `y` matrix can be reused if it has already been created with the proper sizes,
8706:   otherwise it will be recreated. `y` must be initialized to `NULL` if the matrix has not been created yet.
8708: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8709: @*/
8710: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8711: {
8712: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8713: PetscBool trans = PETSC_TRUE;
8714: MatReuse reuse = MAT_INITIAL_MATRIX;
8716: PetscFunctionBegin;
8722: PetscCall(MatGetSize(A, &M, &N));
8723: PetscCall(MatGetSize(x, &Mx, &Nx));
8724: if (N == Mx) trans = PETSC_FALSE;
8725: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8726: Mo = trans ? N : M;
8727: if (*y) {
8728: PetscCall(MatGetSize(*y, &My, &Ny));
8729: if (Mo == My && Nx == Ny) {
8730: reuse = MAT_REUSE_MATRIX;
8731: } else {
8732: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8733: PetscCall(MatDestroy(y));
8734: }
8735: }
8737: if (w && *y == w) { /* this is to minimize changes in PCMG */
8738: PetscBool flg;
8740: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8741: if (w) {
8742: PetscInt My, Ny, Mw, Nw;
8744: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8745: PetscCall(MatGetSize(*y, &My, &Ny));
8746: PetscCall(MatGetSize(w, &Mw, &Nw));
8747: if (!flg || My != Mw || Ny != Nw) w = NULL;
8748: }
8749: if (!w) {
8750: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8751: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8752: PetscCall(PetscObjectDereference((PetscObject)w));
8753: } else {
8754: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8755: }
8756: }
8757: if (!trans) {
8758: PetscCall(MatMatMult(A, x, reuse, PETSC_DETERMINE, y));
8759: } else {
8760: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DETERMINE, y));
8761: }
8762: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8763: PetscFunctionReturn(PETSC_SUCCESS);
8764: }
8766: /*@
8767: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8769: Neighbor-wise Collective
8771: Input Parameters:
8772: + A - the matrix
8773: - x - the input dense matrix
8775: Output Parameter:
8776: . y - the output dense matrix
8778: Level: intermediate
8780: Note:
8781: This allows one to use either the restriction or interpolation (its transpose)
8782:   matrix to do the interpolation. The `y` matrix can be reused if it has already been created with the proper sizes,
8783:   otherwise it will be recreated. `y` must be initialized to `NULL` if the matrix has not been created yet.
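  A typical usage sketch (illustrative only; A and X are assumed to have compatible sizes):
.vb
   Mat Y = NULL;                   /* must be NULL before the first call */
   MatMatInterpolate(A, X, &Y);    /* creates Y */
   /* ... change values in A or X ... */
   MatMatInterpolate(A, X, &Y);    /* reuses Y since the sizes still match */
   MatDestroy(&Y);
.ve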
8785: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8786: @*/
8787: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8788: {
8789: PetscFunctionBegin;
8790: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8791: PetscFunctionReturn(PETSC_SUCCESS);
8792: }
8794: /*@
8795: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8797: Neighbor-wise Collective
8799: Input Parameters:
8800: + A - the matrix
8801: - x - the input dense matrix
8803: Output Parameter:
8804: . y - the output dense matrix
8806: Level: intermediate
8808: Note:
8809: This allows one to use either the restriction or interpolation (its transpose)
8810:   matrix to do the restriction. The `y` matrix can be reused if it has already been created with the proper sizes,
8811:   otherwise it will be recreated. `y` must be initialized to `NULL` if the matrix has not been created yet.
8813: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8814: @*/
8815: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
8816: {
8817: PetscFunctionBegin;
8818: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8819: PetscFunctionReturn(PETSC_SUCCESS);
8820: }
8822: /*@
8823: MatGetNullSpace - retrieves the null space of a matrix.
8825: Logically Collective
8827:   Input Parameter:
8828: . mat - the matrix
8829:   Output Parameter:
8830: . nullsp - the null space object
8831: Level: developer
8833: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8834: @*/
8835: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8836: {
8837: PetscFunctionBegin;
8839: PetscAssertPointer(nullsp, 2);
8840: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8841: PetscFunctionReturn(PETSC_SUCCESS);
8842: }
8844: /*@C
8845: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
8847: Logically Collective
8849: Input Parameters:
8850: + n - the number of matrices
8851: - mat - the array of matrices
8853: Output Parameters:
8854: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space, length 3 * `n`
8856: Level: developer
8858: Note:
8859:   Call `MatRestoreNullSpaces()` to provide these to another array of matrices
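  A minimal sketch of the intended pairing (matsA and matsB are placeholder arrays of n matrices):
.vb
   MatNullSpace *nsp;
   MatGetNullSpaces(n, matsA, &nsp);       /* take references to the null, near null, and transpose null spaces of matsA */
   MatRestoreNullSpaces(n, matsB, &nsp);   /* attach them to matsB and free the array */
.ve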
8861: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8862: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
8863: @*/
8864: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8865: {
8866: PetscFunctionBegin;
8867: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8868: PetscAssertPointer(mat, 2);
8869: PetscAssertPointer(nullsp, 3);
8871: PetscCall(PetscCalloc1(3 * n, nullsp));
8872: for (PetscInt i = 0; i < n; i++) {
8874: (*nullsp)[i] = mat[i]->nullsp;
8875: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
8876: (*nullsp)[n + i] = mat[i]->nearnullsp;
8877: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
8878: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
8879: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
8880: }
8881: PetscFunctionReturn(PETSC_SUCCESS);
8882: }
8884: /*@C
8885: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
8887: Logically Collective
8889: Input Parameters:
8890: + n - the number of matrices
8891: . mat - the array of matrices
8892: - nullsp - an array of null spaces
8894: Level: developer
8896: Note:
8897: Call `MatGetNullSpaces()` to create `nullsp`
8899: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8900: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
8901: @*/
8902: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8903: {
8904: PetscFunctionBegin;
8905: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8906: PetscAssertPointer(mat, 2);
8907: PetscAssertPointer(nullsp, 3);
8908: PetscAssertPointer(*nullsp, 3);
8910: for (PetscInt i = 0; i < n; i++) {
8912: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
8913: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
8914: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
8915: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
8916: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
8917: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
8918: }
8919: PetscCall(PetscFree(*nullsp));
8920: PetscFunctionReturn(PETSC_SUCCESS);
8921: }
8923: /*@
8924: MatSetNullSpace - attaches a null space to a matrix.
8926: Logically Collective
8928: Input Parameters:
8929: + mat - the matrix
8930: - nullsp - the null space object
8932: Level: advanced
8934: Notes:
8935: This null space is used by the `KSP` linear solvers to solve singular systems.
8937:   Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with a `nullsp` of `NULL`
8939: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
8940: to zero but the linear system will still be solved in a least squares sense.
8942: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
8943:   for a matrix $A$ mapping $R^n$ to $R^m$ ($m$ rows, $n$ columns), $R^n$ is the direct sum of the null space of $A$, $n(A)$, and the range of $A^T$, $R(A^T)$.
8944:   Similarly, $R^m$ is the direct sum of $n(A^T)$ and $R(A)$. Hence the linear system $A x = b$ has a solution only if $b \in R(A)$ (or, equivalently, $b$ is orthogonal to
8945:   $n(A^T)$), and if $x$ is a solution then $x + \alpha\,n(A)$ is a solution for any $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution,
8946:   the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$, where $\hat{b}$ is $b$ orthogonalized against $n(A^T)$.
8947:   This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
8949:   If the matrix is known to be symmetric, because it is an `MATSBAIJ` matrix or one has called
8950:   `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`), this
8951: routine also automatically calls `MatSetTransposeNullSpace()`.
8953:   The user should still call `MatNullSpaceDestroy()` on `nullsp`; this routine takes its own reference to the null space.
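  For example, to attach the constant null space that arises for a pure Neumann Laplacian (a sketch; A is the assembled operator):
.vb
   MatNullSpace nullsp;
   MatNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_TRUE, 0, NULL, &nullsp);   /* the constant vector */
   MatSetNullSpace(A, nullsp);
   MatNullSpaceDestroy(&nullsp);   /* the matrix keeps its own reference */
.ve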
8955: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
8956: `KSPSetPCSide()`
8957: @*/
8958: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
8959: {
8960: PetscFunctionBegin;
8963: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
8964: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
8965: mat->nullsp = nullsp;
8966: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
8967: PetscFunctionReturn(PETSC_SUCCESS);
8968: }
8970: /*@
8971: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
8973: Logically Collective
8975:   Input Parameter:
8976: . mat - the matrix
8977:   Output Parameter:
8978: . nullsp - the null space object of the transpose of the matrix
8979: Level: developer
8981: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
8982: @*/
8983: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
8984: {
8985: PetscFunctionBegin;
8988: PetscAssertPointer(nullsp, 2);
8989: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
8990: PetscFunctionReturn(PETSC_SUCCESS);
8991: }
8993: /*@
8994: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
8996: Logically Collective
8998: Input Parameters:
8999: + mat - the matrix
9000: - nullsp - the null space object
9002: Level: advanced
9004: Notes:
9005: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9007: See `MatSetNullSpace()`
9009: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9010: @*/
9011: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
9012: {
9013: PetscFunctionBegin;
9016: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9017: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
9018: mat->transnullsp = nullsp;
9019: PetscFunctionReturn(PETSC_SUCCESS);
9020: }
9022: /*@
9023:   MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions.
9024: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9026: Logically Collective
9028: Input Parameters:
9029: + mat - the matrix
9030: - nullsp - the null space object
9032: Level: advanced
9034: Notes:
9035: Overwrites any previous near null space that may have been attached
9037:   You can remove the near null space by calling this routine with a `nullsp` of `NULL`
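  For example, for an elasticity problem one often attaches the rigid body modes (a sketch; coords is a placeholder `Vec` of nodal coordinates with the appropriate block size):
.vb
   MatNullSpace nearnull;
   MatNullSpaceCreateRigidBody(coords, &nearnull);
   MatSetNearNullSpace(A, nearnull);
   MatNullSpaceDestroy(&nearnull);
.ve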
9039: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9040: @*/
9041: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
9042: {
9043: PetscFunctionBegin;
9047: MatCheckPreallocated(mat, 1);
9048: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9049: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
9050: mat->nearnullsp = nullsp;
9051: PetscFunctionReturn(PETSC_SUCCESS);
9052: }
9054: /*@
9055: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9057: Not Collective
9059: Input Parameter:
9060: . mat - the matrix
9062: Output Parameter:
9063: . nullsp - the null space object, `NULL` if not set
9065: Level: advanced
9067: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9068: @*/
9069: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
9070: {
9071: PetscFunctionBegin;
9074: PetscAssertPointer(nullsp, 2);
9075: MatCheckPreallocated(mat, 1);
9076: *nullsp = mat->nearnullsp;
9077: PetscFunctionReturn(PETSC_SUCCESS);
9078: }
9080: /*@
9081: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9083: Collective
9085: Input Parameters:
9086: + mat - the matrix
9087: . row - row/column permutation
9088: - info - information on desired factorization process
9090: Level: developer
9092: Notes:
9093:   Probably really in-place only when the level of fill is zero; otherwise it allocates
9094:   new space to store the factored matrix and frees the previous memory.
9096: Most users should employ the `KSP` interface for linear solvers
9097: instead of working directly with matrix algebra routines such as this.
9098: See, e.g., `KSPCreate()`.
9100: Developer Note:
9101: The Fortran interface is not autogenerated as the
9102: interface definition cannot be generated correctly [due to `MatFactorInfo`]
9104: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9105: @*/
9106: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9107: {
9108: PetscFunctionBegin;
9112: PetscAssertPointer(info, 3);
9113: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9114: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9115: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9116: MatCheckPreallocated(mat, 1);
9117: PetscUseTypeMethod(mat, iccfactor, row, info);
9118: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9119: PetscFunctionReturn(PETSC_SUCCESS);
9120: }
9122: /*@
9123: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9124: ghosted ones.
9126: Not Collective
9128: Input Parameters:
9129: + mat - the matrix
9130: - diag - the diagonal values, including ghost ones
9132: Level: developer
9134: Notes:
9135: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9137:   This allows one to avoid the communication that `MatDiagonalScale()` would otherwise need in order to perform the scaling
9139: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9140: @*/
9141: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9142: {
9143: PetscMPIInt size;
9145: PetscFunctionBegin;
9150: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9151: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9152: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9153: if (size == 1) {
9154: PetscInt n, m;
9155: PetscCall(VecGetSize(diag, &n));
9156: PetscCall(MatGetSize(mat, NULL, &m));
9157: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9158: PetscCall(MatDiagonalScale(mat, NULL, diag));
9159: } else {
9160: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9161: }
9162: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9163: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9164: PetscFunctionReturn(PETSC_SUCCESS);
9165: }
9167: /*@
9168: MatGetInertia - Gets the inertia from a factored matrix
9170: Collective
9172: Input Parameter:
9173: . mat - the matrix
9175: Output Parameters:
9176: + nneg - number of negative eigenvalues
9177: . nzero - number of zero eigenvalues
9178: - npos - number of positive eigenvalues
9180: Level: advanced
9182: Note:
9183: Matrix must have been factored by `MatCholeskyFactor()`
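  A minimal sketch (F is a placeholder for a matrix that has already been Cholesky factored, for example via `MatGetFactor()` followed by `MatCholeskyFactorSymbolic()` and `MatCholeskyFactorNumeric()`):
.vb
   PetscInt nneg, nzero, npos;
   MatGetInertia(F, &nneg, &nzero, &npos);
.ve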
9185: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9186: @*/
9187: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9188: {
9189: PetscFunctionBegin;
9192: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9193: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9194: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9195: PetscFunctionReturn(PETSC_SUCCESS);
9196: }
9198: /*@C
9199: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9201: Neighbor-wise Collective
9203: Input Parameters:
9204: + mat - the factored matrix obtained with `MatGetFactor()`
9205: - b - the right-hand-side vectors
9207: Output Parameter:
9208: . x - the result vectors
9210: Level: developer
9212: Note:
9213: The vectors `b` and `x` cannot be the same. I.e., one cannot
9214: call `MatSolves`(A,x,x).
9216: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9217: @*/
9218: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9219: {
9220: PetscFunctionBegin;
9223: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9224: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9225: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9227: MatCheckPreallocated(mat, 1);
9228: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9229: PetscUseTypeMethod(mat, solves, b, x);
9230: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9231: PetscFunctionReturn(PETSC_SUCCESS);
9232: }
9234: /*@
9235: MatIsSymmetric - Test whether a matrix is symmetric
9237: Collective
9239: Input Parameters:
9240: + A - the matrix to test
9241: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9243: Output Parameter:
9244: . flg - the result
9246: Level: intermediate
9248: Notes:
9249: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9251:   If the matrix does not yet know whether it is symmetric this can be an expensive operation; see also `MatIsSymmetricKnown()`
9253: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9254:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9256: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9257: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9258: @*/
9259: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9260: {
9261: PetscFunctionBegin;
9263: PetscAssertPointer(flg, 3);
9264: if (A->symmetric != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->symmetric);
9265: else {
9266: if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9267: else PetscCall(MatIsTranspose(A, A, tol, flg));
9268: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9269: }
9270: PetscFunctionReturn(PETSC_SUCCESS);
9271: }
9273: /*@
9274: MatIsHermitian - Test whether a matrix is Hermitian
9276: Collective
9278: Input Parameters:
9279: + A - the matrix to test
9280: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9282: Output Parameter:
9283: . flg - the result
9285: Level: intermediate
9287: Notes:
9288: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9290:   If the matrix does not yet know whether it is Hermitian this can be an expensive operation; see also `MatIsHermitianKnown()`
9292: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9293:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9295: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9296: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9297: @*/
9298: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9299: {
9300: PetscFunctionBegin;
9302: PetscAssertPointer(flg, 3);
9303: if (A->hermitian != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->hermitian);
9304: else {
9305: if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9306: else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9307: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9308: }
9309: PetscFunctionReturn(PETSC_SUCCESS);
9310: }
9312: /*@
9313: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9315: Not Collective
9317: Input Parameter:
9318: . A - the matrix to check
9320: Output Parameters:
9321: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9322: - flg - the result (only valid if set is `PETSC_TRUE`)
9324: Level: advanced
9326: Notes:
9327: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9328: if you want it explicitly checked
9330: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9331:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
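  A common pattern is to use the cached state when available and fall back to an explicit test otherwise (a sketch):
.vb
   PetscBool set, flg;
   MatIsSymmetricKnown(A, &set, &flg);
   if (!set) MatIsSymmetric(A, 0.0, &flg);   /* explicit (potentially expensive) check */
.ve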
9333: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9334: @*/
9335: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9336: {
9337: PetscFunctionBegin;
9339: PetscAssertPointer(set, 2);
9340: PetscAssertPointer(flg, 3);
9341: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9342: *set = PETSC_TRUE;
9343: *flg = PetscBool3ToBool(A->symmetric);
9344: } else {
9345: *set = PETSC_FALSE;
9346: }
9347: PetscFunctionReturn(PETSC_SUCCESS);
9348: }
9350: /*@
9351: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9353: Not Collective
9355: Input Parameter:
9356: . A - the matrix to check
9358: Output Parameters:
9359: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9360: - flg - the result (only valid if set is `PETSC_TRUE`)
9362: Level: advanced
9364: Notes:
9365: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9367: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9368:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9370: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9371: @*/
9372: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9373: {
9374: PetscFunctionBegin;
9376: PetscAssertPointer(set, 2);
9377: PetscAssertPointer(flg, 3);
9378: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9379: *set = PETSC_TRUE;
9380: *flg = PetscBool3ToBool(A->spd);
9381: } else {
9382: *set = PETSC_FALSE;
9383: }
9384: PetscFunctionReturn(PETSC_SUCCESS);
9385: }
9387: /*@
9388: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9390: Not Collective
9392: Input Parameter:
9393: . A - the matrix to check
9395: Output Parameters:
9396: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9397: - flg - the result (only valid if set is `PETSC_TRUE`)
9399: Level: advanced
9401: Notes:
9402: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9403: if you want it explicitly checked
9405: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9406:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9408: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9409: @*/
9410: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9411: {
9412: PetscFunctionBegin;
9414: PetscAssertPointer(set, 2);
9415: PetscAssertPointer(flg, 3);
9416: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9417: *set = PETSC_TRUE;
9418: *flg = PetscBool3ToBool(A->hermitian);
9419: } else {
9420: *set = PETSC_FALSE;
9421: }
9422: PetscFunctionReturn(PETSC_SUCCESS);
9423: }
9425: /*@
9426: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9428: Collective
9430: Input Parameter:
9431: . A - the matrix to test
9433: Output Parameter:
9434: . flg - the result
9436: Level: intermediate
9438: Notes:
9439:   If the matrix does not yet know whether it is structurally symmetric this can be an expensive operation; see also `MatIsStructurallySymmetricKnown()`
9441:   One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9442:   symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9444: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9445: @*/
9446: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9447: {
9448: PetscFunctionBegin;
9450: PetscAssertPointer(flg, 2);
9451: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9452: *flg = PetscBool3ToBool(A->structurally_symmetric);
9453: } else {
9454: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9455: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9456: }
9457: PetscFunctionReturn(PETSC_SUCCESS);
9458: }
9460: /*@
9461: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9463: Not Collective
9465: Input Parameter:
9466: . A - the matrix to check
9468: Output Parameters:
9469: + set - `PETSC_TRUE` if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9470: - flg - the result (only valid if set is `PETSC_TRUE`)
9472: Level: advanced
9474: Notes:
9475: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9476:   symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9478: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9480: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9481: @*/
9482: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9483: {
9484: PetscFunctionBegin;
9486: PetscAssertPointer(set, 2);
9487: PetscAssertPointer(flg, 3);
9488: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9489: *set = PETSC_TRUE;
9490: *flg = PetscBool3ToBool(A->structurally_symmetric);
9491: } else {
9492: *set = PETSC_FALSE;
9493: }
9494: PetscFunctionReturn(PETSC_SUCCESS);
9495: }
9497: /*@
9498: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9499: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9501: Not Collective
9503: Input Parameter:
9504: . mat - the matrix
9506: Output Parameters:
9507: + nstash - the size of the stash
9508: . reallocs - the number of additional mallocs incurred.
9509: . bnstash - the size of the block stash
9510: - breallocs - the number of additional mallocs incurred in the block stash
9512: Level: advanced
9514: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9515: @*/
9516: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9517: {
9518: PetscFunctionBegin;
9519: PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9520: PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9521: PetscFunctionReturn(PETSC_SUCCESS);
9522: }
9524: /*@
9525: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9526:   parallel layout (`PetscLayout`) as the matrix rows and columns
9528: Collective
9530: Input Parameter:
9531: . mat - the matrix
9533: Output Parameters:
9534: + right - (optional) vector that the matrix can be multiplied against
9535: - left - (optional) vector that the matrix vector product can be stored in
9537: Level: advanced
9539: Notes:
9540: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9542:   These are new vectors which are not owned by the `mat`; they should be destroyed with `VecDestroy()` when no longer needed
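  A minimal sketch:
.vb
   Vec x, b;
   MatCreateVecs(A, &x, &b);   /* x is compatible with A*x, b can hold the product */
   MatMult(A, x, b);
   VecDestroy(&x);
   VecDestroy(&b);
.ve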
9544: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9545: @*/
9546: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9547: {
9548: PetscFunctionBegin;
9551: if (mat->ops->getvecs) {
9552: PetscUseTypeMethod(mat, getvecs, right, left);
9553: } else {
9554: if (right) {
9555: PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9556: PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9557: PetscCall(VecSetType(*right, mat->defaultvectype));
9558: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9559: if (mat->boundtocpu && mat->bindingpropagates) {
9560: PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9561: PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9562: }
9563: #endif
9564: }
9565: if (left) {
9566: PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9567: PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9568: PetscCall(VecSetType(*left, mat->defaultvectype));
9569: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9570: if (mat->boundtocpu && mat->bindingpropagates) {
9571: PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9572: PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9573: }
9574: #endif
9575: }
9576: }
9577: PetscFunctionReturn(PETSC_SUCCESS);
9578: }
9580: /*@
9581: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9582: with default values.
9584: Not Collective
9586: Input Parameter:
9587: . info - the `MatFactorInfo` data structure
9589: Level: developer
9591: Notes:
9592: The solvers are generally used through the `KSP` and `PC` objects, for example
9593: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9595: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
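  A sketch of direct usage (illustrative; isrow and iscol are placeholder orderings, and `fill` is one of the fields of `MatFactorInfo`):
.vb
   MatFactorInfo info;
   MatFactorInfoInitialize(&info);
   info.fill = 2.0;   /* expected fill as a ratio of the original nonzeros */
   MatLUFactor(mat, isrow, iscol, &info);
.ve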
9597: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9598: @*/
9599: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9600: {
9601: PetscFunctionBegin;
9602: PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9603: PetscFunctionReturn(PETSC_SUCCESS);
9604: }
9606: /*@
9607: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9609: Collective
9611: Input Parameters:
9612: + mat - the factored matrix
9613: - is - the index set defining the Schur indices (0-based)
9615: Level: advanced
9617: Notes:
9618: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9620: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9622: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
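  A sketch of the overall workflow (illustrative; is, isrow, iscol, info, rhs, and sol are placeholders provided by the caller):
.vb
   Mat F;
   MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F);
   MatFactorSetSchurIS(F, is);                       /* is lists the (0-based) Schur indices */
   MatLUFactorSymbolic(F, A, isrow, iscol, &info);
   MatLUFactorNumeric(F, A, &info);
   MatFactorSolveSchurComplement(F, rhs, sol);
.ve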
9624: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9625: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9626: @*/
9627: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9628: {
9629: PetscErrorCode (*f)(Mat, IS);
9631: PetscFunctionBegin;
9636: PetscCheckSameComm(mat, 1, is, 2);
9637: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9638: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9639: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9640: PetscCall(MatDestroy(&mat->schur));
9641: PetscCall((*f)(mat, is));
9642: PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9643: PetscFunctionReturn(PETSC_SUCCESS);
9644: }
9646: /*@
9647: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9649: Logically Collective
9651: Input Parameters:
9652: + F - the factored matrix obtained by calling `MatGetFactor()`
9653: . S - location where to return the Schur complement, can be `NULL`
9654: - status - the status of the Schur complement matrix, can be `NULL`
9656: Level: advanced
9658: Notes:
9659: You must call `MatFactorSetSchurIS()` before calling this routine.
9661: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9663: The routine provides a copy of the Schur matrix stored within the solver data structures.
9664: The caller must destroy the object when it is no longer needed.
9665:   If `MatFactorInvertSchurComplement()` has been called, the routine returns the inverse of the Schur complement instead.
9667: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9669: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9671: Developer Note:
9672: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9673: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9675: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9676: @*/
9677: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9678: {
9679: PetscFunctionBegin;
9681: if (S) PetscAssertPointer(S, 2);
9682: if (status) PetscAssertPointer(status, 3);
9683: if (S) {
9684: PetscErrorCode (*f)(Mat, Mat *);
9686: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9687: if (f) {
9688: PetscCall((*f)(F, S));
9689: } else {
9690: PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9691: }
9692: }
9693: if (status) *status = F->schur_status;
9694: PetscFunctionReturn(PETSC_SUCCESS);
9695: }
9697: /*@
9698: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9700: Logically Collective
9702: Input Parameters:
9703: + F - the factored matrix obtained by calling `MatGetFactor()`
9704: . S - location where to return the Schur complement, can be `NULL`
9705: - status - the status of the Schur complement matrix, can be `NULL`
9707: Level: advanced
9709: Notes:
9710: You must call `MatFactorSetSchurIS()` before calling this routine.
9712:   Schur complement mode is currently implemented for sequential matrices factored with `MATSOLVERMUMPS`
9714:   The routine returns the Schur complement stored within the data structures of the solver.
9716: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9718: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9720: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9722: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
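  A sketch of the get/restore pairing:
.vb
   Mat                  S;
   MatFactorSchurStatus status;
   MatFactorGetSchurComplement(F, &S, &status);
   /* ... use S in place, without destroying it ... */
   MatFactorRestoreSchurComplement(F, &S, status);
.ve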
9724: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9725: @*/
9726: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9727: {
9728: PetscFunctionBegin;
9730: if (S) {
9731: PetscAssertPointer(S, 2);
9732: *S = F->schur;
9733: }
9734: if (status) {
9735: PetscAssertPointer(status, 3);
9736: *status = F->schur_status;
9737: }
9738: PetscFunctionReturn(PETSC_SUCCESS);
9739: }
9741: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9742: {
9743: Mat S = F->schur;
9745: PetscFunctionBegin;
9746: switch (F->schur_status) {
9747: case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9748: case MAT_FACTOR_SCHUR_INVERTED:
9749: if (S) {
9750: S->ops->solve = NULL;
9751: S->ops->matsolve = NULL;
9752: S->ops->solvetranspose = NULL;
9753: S->ops->matsolvetranspose = NULL;
9754: S->ops->solveadd = NULL;
9755: S->ops->solvetransposeadd = NULL;
9756: S->factortype = MAT_FACTOR_NONE;
9757: PetscCall(PetscFree(S->solvertype));
9758: }
9759: case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9760: break;
9761: default:
9762: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9763: }
9764: PetscFunctionReturn(PETSC_SUCCESS);
9765: }
9767: /*@
9768: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9770: Logically Collective
9772: Input Parameters:
9773: + F - the factored matrix obtained by calling `MatGetFactor()`
9774: . S - location where the Schur complement is stored
9775: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9777: Level: advanced
9779: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9780: @*/
9781: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9782: {
9783: PetscFunctionBegin;
9785: if (S) {
9787: *S = NULL;
9788: }
9789: F->schur_status = status;
9790: PetscCall(MatFactorUpdateSchurStatus_Private(F));
9791: PetscFunctionReturn(PETSC_SUCCESS);
9792: }
9794: /*@
9795: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9797: Logically Collective
9799: Input Parameters:
9800: + F - the factored matrix obtained by calling `MatGetFactor()`
9801: . rhs - location where the right-hand side of the Schur complement system is stored
9802: - sol - location where the solution of the Schur complement system has to be returned
9804: Level: advanced
9806: Notes:
9807: The sizes of the vectors should match the size of the Schur complement
9809: Must be called after `MatFactorSetSchurIS()`
9811: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9812: @*/
9813: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9814: {
9815: PetscFunctionBegin;
9822: PetscCheckSameComm(F, 1, rhs, 2);
9823: PetscCheckSameComm(F, 1, sol, 3);
9824: PetscCall(MatFactorFactorizeSchurComplement(F));
9825: switch (F->schur_status) {
9826: case MAT_FACTOR_SCHUR_FACTORED:
9827: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
9828: break;
9829: case MAT_FACTOR_SCHUR_INVERTED:
9830: PetscCall(MatMultTranspose(F->schur, rhs, sol));
9831: break;
9832: default:
9833: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9834: }
9835: PetscFunctionReturn(PETSC_SUCCESS);
9836: }
9838: /*@
9839: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9841: Logically Collective
9843: Input Parameters:
9844: + F - the factored matrix obtained by calling `MatGetFactor()`
9845: . rhs - location where the right-hand side of the Schur complement system is stored
9846: - sol - location where the solution of the Schur complement system has to be returned
9848: Level: advanced
9850: Notes:
9851: The sizes of the vectors should match the size of the Schur complement
9853: Must be called after `MatFactorSetSchurIS()`
9855: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
9856: @*/
9857: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
9858: {
9859: PetscFunctionBegin;
9866: PetscCheckSameComm(F, 1, rhs, 2);
9867: PetscCheckSameComm(F, 1, sol, 3);
9868: PetscCall(MatFactorFactorizeSchurComplement(F));
9869: switch (F->schur_status) {
9870: case MAT_FACTOR_SCHUR_FACTORED:
9871: PetscCall(MatSolve(F->schur, rhs, sol));
9872: break;
9873: case MAT_FACTOR_SCHUR_INVERTED:
9874: PetscCall(MatMult(F->schur, rhs, sol));
9875: break;
9876: default:
9877: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9878: }
9879: PetscFunctionReturn(PETSC_SUCCESS);
9880: }
9882: PETSC_EXTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
9883: #if PetscDefined(HAVE_CUDA)
9884: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
9885: #endif
9887: /* Schur status updated in the interface */
9888: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
9889: {
9890: Mat S = F->schur;
9892: PetscFunctionBegin;
9893: if (S) {
9894: PetscMPIInt size;
9895: PetscBool isdense, isdensecuda;
9897: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
9898: PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
9899: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
9900: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
9901: PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
9902: PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
9903: if (isdense) {
9904: PetscCall(MatSeqDenseInvertFactors_Private(S));
9905: } else if (isdensecuda) {
9906: #if defined(PETSC_HAVE_CUDA)
9907: PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
9908: #endif
9909: }
9910: // TODO: should a MATSEQDENSEHIP branch be added here?
9911: PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
9912: }
9913: PetscFunctionReturn(PETSC_SUCCESS);
9914: }
9916: /*@
9917: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
9919: Logically Collective
9921: Input Parameter:
9922: . F - the factored matrix obtained by calling `MatGetFactor()`
9924: Level: advanced
9926: Notes:
9927: Must be called after `MatFactorSetSchurIS()`.
9929: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
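  Example Usage:
  A brief sketch, assuming `F` comes from a solver that supports the Schur complement API (for example MUMPS) and that `MatFactorSetSchurIS()` was called before factorization:
.vb
  Mat S;
  PetscCall(MatFactorInvertSchurComplement(F));
  PetscCall(MatFactorGetSchurComplement(F, &S, NULL)); // S now stores the inverse of the Schur complement
  // ... use S ...
  PetscCall(MatFactorRestoreSchurComplement(F, &S, MAT_FACTOR_SCHUR_INVERTED));
.ve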
9931: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
9932: @*/
9933: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
9934: {
9935: PetscFunctionBegin;
9938: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
9939: PetscCall(MatFactorFactorizeSchurComplement(F));
9940: PetscCall(MatFactorInvertSchurComplement_Private(F));
9941: F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
9942: PetscFunctionReturn(PETSC_SUCCESS);
9943: }
9945: /*@
9946: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
9948: Logically Collective
9950: Input Parameter:
9951: . F - the factored matrix obtained by calling `MatGetFactor()`
9953: Level: advanced
9955: Note:
9956: Must be called after `MatFactorSetSchurIS()`
9958: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
9959: @*/
9960: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
9961: {
9962: MatFactorInfo info;
9964: PetscFunctionBegin;
9967: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
9968: PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
9969: PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
9970: if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
9971: PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
9972: } else {
9973: PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
9974: }
9975: PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
9976: F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
9977: PetscFunctionReturn(PETSC_SUCCESS);
9978: }
9980: /*@
9981: MatPtAP - Creates the matrix product $C = P^T * A * P$
9983: Neighbor-wise Collective
9985: Input Parameters:
9986: + A - the matrix
9987: . P - the projection matrix
9988: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
9989: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
9990: if the result is a dense matrix this is irrelevant
9992: Output Parameter:
9993: . C - the product matrix
9995: Level: intermediate
9997: Notes:
9998: C will be created and must be destroyed by the user with `MatDestroy()`.
10000: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10002: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10004: Developer Note:
10005: For matrix types without a special implementation the function falls back to `MatMatMult()` followed by `MatTransposeMatMult()`.
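  Example Usage:
  A minimal sketch, with `A` and `P` standing for any assembled, compatible matrices:
.vb
  Mat C;
  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C));
  // ... change numerical values of A and/or P, keeping their nonzero patterns ...
  PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, PETSC_DETERMINE, &C));
  PetscCall(MatDestroy(&C));
.ve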
10007: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10008: @*/
10009: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
10010: {
10011: PetscFunctionBegin;
10012: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10013: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10015: if (scall == MAT_INITIAL_MATRIX) {
10016: PetscCall(MatProductCreate(A, P, NULL, C));
10017: PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
10018: PetscCall(MatProductSetAlgorithm(*C, "default"));
10019: PetscCall(MatProductSetFill(*C, fill));
10021: (*C)->product->api_user = PETSC_TRUE;
10022: PetscCall(MatProductSetFromOptions(*C));
10023: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
10024: PetscCall(MatProductSymbolic(*C));
10025: } else { /* scall == MAT_REUSE_MATRIX */
10026: PetscCall(MatProductReplaceMats(A, P, NULL, *C));
10027: }
10029: PetscCall(MatProductNumeric(*C));
10030: (*C)->symmetric = A->symmetric;
10031: (*C)->spd = A->spd;
10032: PetscFunctionReturn(PETSC_SUCCESS);
10033: }
10035: /*@
10036: MatRARt - Creates the matrix product $C = R * A * R^T$
10038: Neighbor-wise Collective
10040: Input Parameters:
10041: + A - the matrix
10042: . R - the projection matrix
10043: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10044: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10045: if the result is a dense matrix this is irrelevant
10047: Output Parameter:
10048: . C - the product matrix
10050: Level: intermediate
10052: Notes:
10053: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10055: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10057: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10058: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10059: the parallel `MatRARt()` is implemented computing the explicit transpose of `R`, which can be very expensive.
10060: We recommend using `MatPtAP()` when possible.
10062: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10064: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10065: @*/
10066: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10067: {
10068: PetscFunctionBegin;
10069: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10070: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10072: if (scall == MAT_INITIAL_MATRIX) {
10073: PetscCall(MatProductCreate(A, R, NULL, C));
10074: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10075: PetscCall(MatProductSetAlgorithm(*C, "default"));
10076: PetscCall(MatProductSetFill(*C, fill));
10078: (*C)->product->api_user = PETSC_TRUE;
10079: PetscCall(MatProductSetFromOptions(*C));
10080: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10081: PetscCall(MatProductSymbolic(*C));
10082: } else { /* scall == MAT_REUSE_MATRIX */
10083: PetscCall(MatProductReplaceMats(A, R, NULL, *C));
10084: }
10086: PetscCall(MatProductNumeric(*C));
10087: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10088: PetscFunctionReturn(PETSC_SUCCESS);
10089: }
10091: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10092: {
10093: PetscBool flg = PETSC_TRUE;
10095: PetscFunctionBegin;
10096: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10097: if (scall == MAT_INITIAL_MATRIX) {
10098: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10099: PetscCall(MatProductCreate(A, B, NULL, C));
10100: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10101: PetscCall(MatProductSetFill(*C, fill));
10102: } else { /* scall == MAT_REUSE_MATRIX */
10103: Mat_Product *product = (*C)->product;
10105: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
10106: if (flg && product && product->type != ptype) {
10107: PetscCall(MatProductClear(*C));
10108: product = NULL;
10109: }
10110: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10111: if (!product) { /* the user provided the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10112: PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10113: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10114: product = (*C)->product;
10115: product->fill = fill;
10116: product->clear = PETSC_TRUE;
10117: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10118: flg = PETSC_FALSE;
10119: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10120: }
10121: }
10122: if (flg) {
10123: (*C)->product->api_user = PETSC_TRUE;
10124: PetscCall(MatProductSetType(*C, ptype));
10125: PetscCall(MatProductSetFromOptions(*C));
10126: PetscCall(MatProductSymbolic(*C));
10127: }
10128: PetscCall(MatProductNumeric(*C));
10129: PetscFunctionReturn(PETSC_SUCCESS);
10130: }
10132: /*@
10133: MatMatMult - Performs matrix-matrix multiplication $C = A*B$.
10135: Neighbor-wise Collective
10137: Input Parameters:
10138: + A - the left matrix
10139: . B - the right matrix
10140: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10141: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10142: if the result is a dense matrix this is irrelevant
10144: Output Parameter:
10145: . C - the product matrix
10147: Notes:
10148: Unless `scall` is `MAT_REUSE_MATRIX`, `C` will be created.
10150: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10151: call to this function with `MAT_INITIAL_MATRIX`.
10153: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value actually needed.
10155: In the special case where matrix `B` (and hence `C`) are dense you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10156: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10158: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10160: Example of Usage:
10161: .vb
10162: MatProductCreate(A,B,NULL,&C);
10163: MatProductSetType(C,MATPRODUCT_AB);
10164: MatProductSymbolic(C);
10165: MatProductNumeric(C); // compute C=A * B
10166: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10167: MatProductNumeric(C);
10168: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10169: MatProductNumeric(C);
10170: .ve
10172: Level: intermediate
10174: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10175: @*/
10176: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10177: {
10178: PetscFunctionBegin;
10179: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10180: PetscFunctionReturn(PETSC_SUCCESS);
10181: }
10183: /*@
10184: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10186: Neighbor-wise Collective
10188: Input Parameters:
10189: + A - the left matrix
10190: . B - the right matrix
10191: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10192: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10194: Output Parameter:
10195: . C - the product matrix
10197: Options Database Key:
10198: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10199: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10200: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10202: Level: intermediate
10204: Notes:
10205: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10207: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10209: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10210: actually needed.
10212: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10213: and for pairs of `MATMPIDENSE` matrices.
10215: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`
10217: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
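  Example Usage:
  A minimal sketch; passing the same matrix twice computes $C = A*A^T$, which is then marked symmetric:
.vb
  Mat C;
  PetscCall(MatMatTransposeMult(A, A, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); // C = A * A^T
  PetscCall(MatDestroy(&C));
.ve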
10219: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()`, `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10220: @*/
10221: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10222: {
10223: PetscFunctionBegin;
10224: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10225: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10226: PetscFunctionReturn(PETSC_SUCCESS);
10227: }
10229: /*@
10230: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10232: Neighbor-wise Collective
10234: Input Parameters:
10235: + A - the left matrix
10236: . B - the right matrix
10237: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10238: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10240: Output Parameter:
10241: . C - the product matrix
10243: Level: intermediate
10245: Notes:
10246: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10248: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
10250: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`
10252: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10253: actually needed.
10255: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10256: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10258: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
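  Example Usage:
  A minimal sketch forming the normal-equations operator (here `A` is any supported assembled matrix):
.vb
  Mat AtA;
  PetscCall(MatTransposeMatMult(A, A, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &AtA)); // AtA = A^T * A
  PetscCall(MatDestroy(&AtA));
.ve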
10260: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10261: @*/
10262: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10263: {
10264: PetscFunctionBegin;
10265: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10266: PetscFunctionReturn(PETSC_SUCCESS);
10267: }
10269: /*@
10270: MatMatMatMult - Performs matrix-matrix-matrix multiplication $D = A*B*C$.
10272: Neighbor-wise Collective
10274: Input Parameters:
10275: + A - the left matrix
10276: . B - the middle matrix
10277: . C - the right matrix
10278: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10279: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B) + nnz(C)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10280: if the result is a dense matrix this is irrelevant
10282: Output Parameter:
10283: . D - the product matrix
10285: Level: intermediate
10287: Notes:
10288: Unless `scall` is `MAT_REUSE_MATRIX` `D` will be created.
10290: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10292: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`
10294: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10295: actually needed.
10297: If you have many matrices with the same non-zero structure to multiply, you
10298: should use `MAT_REUSE_MATRIX` in all calls but the first
10300: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
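  Example Usage:
  A minimal sketch of a triple product with distinct restriction and prolongation; `R`, `A`, and `P` are illustrative names:
.vb
  Mat D;
  PetscCall(MatMatMatMult(R, A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &D)); // D = R * A * P
  // ... update the values of R, A, or P without changing their nonzero patterns ...
  PetscCall(MatMatMatMult(R, A, P, MAT_REUSE_MATRIX, PETSC_DETERMINE, &D));
  PetscCall(MatDestroy(&D));
.ve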
10302: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10303: @*/
10304: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10305: {
10306: PetscFunctionBegin;
10307: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10308: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10310: if (scall == MAT_INITIAL_MATRIX) {
10311: PetscCall(MatProductCreate(A, B, C, D));
10312: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10313: PetscCall(MatProductSetAlgorithm(*D, "default"));
10314: PetscCall(MatProductSetFill(*D, fill));
10316: (*D)->product->api_user = PETSC_TRUE;
10317: PetscCall(MatProductSetFromOptions(*D));
10318: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10319: ((PetscObject)C)->type_name);
10320: PetscCall(MatProductSymbolic(*D));
10321: } else { /* user may change input matrices when REUSE */
10322: PetscCall(MatProductReplaceMats(A, B, C, *D));
10323: }
10324: PetscCall(MatProductNumeric(*D));
10325: PetscFunctionReturn(PETSC_SUCCESS);
10326: }
10328: /*@
10329: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10331: Collective
10333: Input Parameters:
10334: + mat - the matrix
10335: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10336: . subcomm - MPI communicator split from the communicator in which `mat` resides (or `MPI_COMM_NULL` if `nsubcomm` is used)
10337: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10339: Output Parameter:
10340: . matredundant - redundant matrix
10342: Level: advanced
10344: Notes:
10345: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10346: original matrix has not changed from that last call to `MatCreateRedundantMatrix()`.
10348: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10349: calling it.
10351: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
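  Example Usage:
  A minimal sketch creating two redundant copies and letting the routine build the subcommunicators internally:
.vb
  Mat Ared;
  PetscCall(MatCreateRedundantMatrix(A, 2, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &Ared));
  // ... each subcommunicator now holds a full copy of A ...
  PetscCall(MatDestroy(&Ared));
.ve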
10353: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10354: @*/
10355: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10356: {
10357: MPI_Comm comm;
10358: PetscMPIInt size;
10359: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10360: Mat_Redundant *redund = NULL;
10361: PetscSubcomm psubcomm = NULL;
10362: MPI_Comm subcomm_in = subcomm;
10363: Mat *matseq;
10364: IS isrow, iscol;
10365: PetscBool newsubcomm = PETSC_FALSE;
10367: PetscFunctionBegin;
10369: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10370: PetscAssertPointer(*matredundant, 5);
10372: }
10374: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10375: if (size == 1 || nsubcomm == 1) {
10376: if (reuse == MAT_INITIAL_MATRIX) {
10377: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10378: } else {
10379: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10380: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10381: }
10382: PetscFunctionReturn(PETSC_SUCCESS);
10383: }
10385: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10386: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10387: MatCheckPreallocated(mat, 1);
10389: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10390: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10391: /* create psubcomm, then get subcomm */
10392: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10393: PetscCallMPI(MPI_Comm_size(comm, &size));
10394: PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);
10396: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10397: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10398: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10399: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10400: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10401: newsubcomm = PETSC_TRUE;
10402: PetscCall(PetscSubcommDestroy(&psubcomm));
10403: }
10405: /* get isrow, iscol and a local sequential matrix matseq[0] */
10406: if (reuse == MAT_INITIAL_MATRIX) {
10407: mloc_sub = PETSC_DECIDE;
10408: nloc_sub = PETSC_DECIDE;
10409: if (bs < 1) {
10410: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10411: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10412: } else {
10413: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10414: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10415: }
10416: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10417: rstart = rend - mloc_sub;
10418: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10419: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10420: PetscCall(ISSetIdentity(iscol));
10421: } else { /* reuse == MAT_REUSE_MATRIX */
10422: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10423: /* retrieve subcomm */
10424: PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10425: redund = (*matredundant)->redundant;
10426: isrow = redund->isrow;
10427: iscol = redund->iscol;
10428: matseq = redund->matseq;
10429: }
10430: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10432: /* get matredundant over subcomm */
10433: if (reuse == MAT_INITIAL_MATRIX) {
10434: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10436: /* create a supporting struct and attach it to C for reuse */
10437: PetscCall(PetscNew(&redund));
10438: (*matredundant)->redundant = redund;
10439: redund->isrow = isrow;
10440: redund->iscol = iscol;
10441: redund->matseq = matseq;
10442: if (newsubcomm) {
10443: redund->subcomm = subcomm;
10444: } else {
10445: redund->subcomm = MPI_COMM_NULL;
10446: }
10447: } else {
10448: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10449: }
10450: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10451: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10452: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10453: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10454: }
10455: #endif
10456: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10457: PetscFunctionReturn(PETSC_SUCCESS);
10458: }
10460: /*@C
10461: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10462: a given `Mat`. Each submatrix can span multiple procs.
10464: Collective
10466: Input Parameters:
10467: + mat - the matrix
10468: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10469: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10471: Output Parameter:
10472: . subMat - parallel sub-matrices each spanning a given `subcomm`
10474: Level: advanced
10476: Notes:
10477: The submatrix partition across processes is dictated by `subComm`, a
10478: communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10479: is not restricted to be grouped with consecutive original MPI processes.
10481: Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10482: maps directly to the layout of the original matrix [with respect to the local
10483: row,col partitioning]. So the original 'DiagonalMat' naturally maps
10484: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10485: the `subMat`. However, the offDiagMat loses some columns, and these are
10486: reconstructed with `MatSetValues()`.
10488: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10490: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10491: @*/
10492: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10493: {
10494: PetscMPIInt commsize, subCommSize;
10496: PetscFunctionBegin;
10497: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10498: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10499: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize);
10501: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10502: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10503: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10504: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10505: PetscFunctionReturn(PETSC_SUCCESS);
10506: }
10508: /*@
10509: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10511: Not Collective
10513: Input Parameters:
10514: + mat - matrix to extract local submatrix from
10515: . isrow - local row indices for submatrix
10516: - iscol - local column indices for submatrix
10518: Output Parameter:
10519: . submat - the submatrix
10521: Level: intermediate
10523: Notes:
10524: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10526: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10527: the same as that of `mat`, it may be `PETSC_COMM_SELF`, or some other subcommunicator of `mat`'s.
10529: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10530: `MatSetValuesBlockedLocal()` will also be implemented.
10532: `mat` must have had an `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10533: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
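  Example Usage:
  A minimal sketch, assuming `isrow` and `iscol` contain local indices and a local-to-global mapping has been set on `A`; `r`, `c`, and `v` are illustrative:
.vb
  Mat sub;
  PetscCall(MatGetLocalSubMatrix(A, isrow, iscol, &sub));
  PetscCall(MatSetValuesLocal(sub, 1, &r, 1, &c, &v, ADD_VALUES));
  PetscCall(MatRestoreLocalSubMatrix(A, isrow, iscol, &sub));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve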
10535: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10536: @*/
10537: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10538: {
10539: PetscFunctionBegin;
10543: PetscCheckSameComm(isrow, 2, iscol, 3);
10544: PetscAssertPointer(submat, 4);
10545: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10547: if (mat->ops->getlocalsubmatrix) {
10548: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10549: } else {
10550: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10551: }
10552: PetscFunctionReturn(PETSC_SUCCESS);
10553: }
10555: /*@
10556: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10558: Not Collective
10560: Input Parameters:
10561: + mat - matrix to extract local submatrix from
10562: . isrow - local row indices for submatrix
10563: . iscol - local column indices for submatrix
10564: - submat - the submatrix
10566: Level: intermediate
10568: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10569: @*/
10570: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10571: {
10572: PetscFunctionBegin;
10576: PetscCheckSameComm(isrow, 2, iscol, 3);
10577: PetscAssertPointer(submat, 4);
10580: if (mat->ops->restorelocalsubmatrix) {
10581: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10582: } else {
10583: PetscCall(MatDestroy(submat));
10584: }
10585: *submat = NULL;
10586: PetscFunctionReturn(PETSC_SUCCESS);
10587: }
10589: /*@
10590: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10592: Collective
10594: Input Parameter:
10595: . mat - the matrix
10597: Output Parameter:
10598: . is - if any rows have zero diagonals this contains the list of them
10600: Level: developer
10602: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10603: @*/
10604: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10605: {
10606: PetscFunctionBegin;
10609: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10610: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10612: if (!mat->ops->findzerodiagonals) {
10613: Vec diag;
10614: const PetscScalar *a;
10615: PetscInt *rows;
10616: PetscInt rStart, rEnd, r, nrow = 0;
10618: PetscCall(MatCreateVecs(mat, &diag, NULL));
10619: PetscCall(MatGetDiagonal(mat, diag));
10620: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10621: PetscCall(VecGetArrayRead(diag, &a));
10622: for (r = 0; r < rEnd - rStart; ++r)
10623: if (a[r] == 0.0) ++nrow;
10624: PetscCall(PetscMalloc1(nrow, &rows));
10625: nrow = 0;
10626: for (r = 0; r < rEnd - rStart; ++r)
10627: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10628: PetscCall(VecRestoreArrayRead(diag, &a));
10629: PetscCall(VecDestroy(&diag));
10630: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10631: } else {
10632: PetscUseTypeMethod(mat, findzerodiagonals, is);
10633: }
10634: PetscFunctionReturn(PETSC_SUCCESS);
10635: }
10637: /*@
10638: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10640: Collective
10642: Input Parameter:
10643: . mat - the matrix
10645: Output Parameter:
10646: . is - contains the list of rows with off block diagonal entries
10648: Level: developer
10650: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10651: @*/
10652: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10653: {
10654: PetscFunctionBegin;
10657: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10658: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10660: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10661: PetscFunctionReturn(PETSC_SUCCESS);
10662: }
10664: /*@C
10665: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10667: Collective; No Fortran Support
10669: Input Parameter:
10670: . mat - the matrix
10672: Output Parameter:
10673: . values - the block inverses in column major order (FORTRAN-like)
10675: Level: advanced
10677: Notes:
10678: The size of the blocks is determined by the block size of the matrix.
10680: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10682: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
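  Example Usage:
  A minimal sketch; the returned array is owned by the matrix (an assumption consistent with the const output), so the caller should not free it:
.vb
  const PetscScalar *vals;
  PetscInt           bs, m;
  PetscCall(MatGetBlockSize(A, &bs));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatInvertBlockDiagonal(A, &vals)); // vals holds m/bs inverted blocks, each bs*bs, column major
.ve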
10684: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10685: @*/
10686: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
10687: {
10688: PetscFunctionBegin;
10690: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10691: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10692: PetscUseTypeMethod(mat, invertblockdiagonal, values);
10693: PetscFunctionReturn(PETSC_SUCCESS);
10694: }
10696: /*@
10697: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10699: Collective; No Fortran Support
10701: Input Parameters:
10702: + mat - the matrix
10703: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10704: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10706: Output Parameter:
10707: . values - the block inverses in column major order (FORTRAN-like)
10709: Level: advanced
10711: Notes:
10712: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10714: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10716: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10717: @*/
10718: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
10719: {
10720: PetscFunctionBegin;
10722: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10723: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10724: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10725: PetscFunctionReturn(PETSC_SUCCESS);
10726: }
10728: /*@
10729: MatInvertBlockDiagonalMat - Sets the values of matrix `C` to the inverted block diagonal of matrix `A`
10731: Collective
10733: Input Parameters:
10734: + A - the matrix
10735: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10737: Level: advanced
10739: Note:
10740: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10742: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10743: @*/
10744: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10745: {
10746: const PetscScalar *vals;
10747: PetscInt *dnnz;
10748: PetscInt m, rstart, rend, bs, i, j;
10750: PetscFunctionBegin;
10751: PetscCall(MatInvertBlockDiagonal(A, &vals));
10752: PetscCall(MatGetBlockSize(A, &bs));
10753: PetscCall(MatGetLocalSize(A, &m, NULL));
10754: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10755: PetscCall(MatSetBlockSizes(C, A->rmap->bs, A->cmap->bs));
10756: PetscCall(PetscMalloc1(m / bs, &dnnz));
10757: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10758: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10759: PetscCall(PetscFree(dnnz));
10760: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10761: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10762: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10763: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10764: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10765: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10766: PetscFunctionReturn(PETSC_SUCCESS);
10767: }
10769: /*@
10770: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10771: via `MatTransposeColoringCreate()`.
10773: Collective
10775: Input Parameter:
10776: . c - coloring context
10778: Level: intermediate
10780: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10781: @*/
10782: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10783: {
10784: MatTransposeColoring matcolor = *c;
10786: PetscFunctionBegin;
10787: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10788: if (--((PetscObject)matcolor)->refct > 0) {
10789: matcolor = NULL;
10790: PetscFunctionReturn(PETSC_SUCCESS);
10791: }
10793: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10794: PetscCall(PetscFree(matcolor->rows));
10795: PetscCall(PetscFree(matcolor->den2sp));
10796: PetscCall(PetscFree(matcolor->colorforcol));
10797: PetscCall(PetscFree(matcolor->columns));
10798: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10799: PetscCall(PetscHeaderDestroy(c));
10800: PetscFunctionReturn(PETSC_SUCCESS);
10801: }
10803: /*@
10804: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
10805: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
10806: `MatTransposeColoring` to sparse `B`.
10808: Collective
10810: Input Parameters:
10811: + coloring - coloring context created with `MatTransposeColoringCreate()`
10812: - B - sparse matrix
10814: Output Parameter:
10815: . Btdense - dense matrix $B^T$
10817: Level: developer
10819: Note:
10820: These are used internally for some implementations of `MatRARt()`
10822: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10823: @*/
10824: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
10825: {
10826: PetscFunctionBegin;
10831: PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
10832: PetscFunctionReturn(PETSC_SUCCESS);
10833: }
10835: /*@
10836: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
10837: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
10838: in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recovers the sparse matrix
10839: $C_{sp}$ from $C_{den}$.
10841: Collective
10843: Input Parameters:
10844: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10845: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
10847: Output Parameter:
10848: . Csp - sparse matrix
10850: Level: developer
10852: Note:
10853: These are used internally for some implementations of `MatRARt()`
10855: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
10856: @*/
10857: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
10858: {
10859: PetscFunctionBegin;
10864: PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
10865: PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
10866: PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
10867: PetscFunctionReturn(PETSC_SUCCESS);
10868: }
10870: /*@
10871: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
10873: Collective
10875: Input Parameters:
10876: + mat - the matrix product C
10877: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
10879: Output Parameter:
10880: . color - the new coloring context
10882: Level: intermediate
10884: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
10885: `MatTransColoringApplyDenToSp()`
10886: @*/
10887: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
10888: {
10889: MatTransposeColoring c;
10890: MPI_Comm comm;
10892: PetscFunctionBegin;
10893: PetscAssertPointer(color, 3);
10895: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10896: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10897: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
10898: c->ctype = iscoloring->ctype;
10899: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
10900: *color = c;
10901: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10902: PetscFunctionReturn(PETSC_SUCCESS);
10903: }
10905: /*@
10906: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
10907: matrix has had nonzero locations added or removed since the previous call, the value will be larger.
10909: Not Collective
10911: Input Parameter:
10912: . mat - the matrix
10914: Output Parameter:
10915: . state - the current state
10917: Level: intermediate
10919: Notes:
10920: You can only compare states from two different calls to the SAME matrix; you cannot compare states obtained from
10921: different matrices
10923: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
10925: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
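  Example Usage:
  A minimal sketch caching the state to detect later structural changes:
.vb
  PetscObjectState state0, state1;
  PetscCall(MatGetNonzeroState(A, &state0));
  // ... further assembly that may introduce new nonzero locations ...
  PetscCall(MatGetNonzeroState(A, &state1));
  if (state1 != state0) {
    // the nonzero structure changed; for example, redo a symbolic factorization
  }
.ve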
10927: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
10928: @*/
10929: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
10930: {
10931: PetscFunctionBegin;
10933: *state = mat->nonzerostate;
10934: PetscFunctionReturn(PETSC_SUCCESS);
10935: }
10937: /*@
10938: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
10939: matrices from each processor
10941: Collective
10943: Input Parameters:
10944: + comm - the communicator the parallel matrix will live on
10945: . seqmat - the input sequential matrix (one per MPI process)
10946: . n - number of local columns (or `PETSC_DECIDE`)
10947: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10949: Output Parameter:
10950: . mpimat - the parallel matrix generated
10952: Level: developer
10954: Note:
10955: The number of columns of the matrix on EACH MPI process MUST be the same.
10957: .seealso: [](ch_matrices), `Mat`
10958: @*/
10959: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
10960: {
10961: PetscMPIInt size;
10963: PetscFunctionBegin;
10964: PetscCallMPI(MPI_Comm_size(comm, &size));
10965: if (size == 1) {
10966: if (reuse == MAT_INITIAL_MATRIX) {
10967: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
10968: } else {
10969: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
10970: }
10971: PetscFunctionReturn(PETSC_SUCCESS);
10972: }
10974: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10976: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
10977: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
10978: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
10979: PetscFunctionReturn(PETSC_SUCCESS);
10980: }
10982: /*@
10983: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
10985: Collective
10987: Input Parameters:
10988: + A - the matrix to create subdomains from
10989: - N - requested number of subdomains
10991: Output Parameters:
10992: + n - number of subdomains resulting on this MPI process
10993: - iss - `IS` list with indices of subdomains on this MPI process
10995: Level: advanced
10997: Note:
10998: The number of subdomains must be smaller than the communicator size
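  Example Usage:
  A minimal sketch requesting 4 subdomains; the cleanup pattern assumes the caller owns the returned `IS` list, consistent with the `PetscMalloc1()` in the implementation below:
.vb
  PetscInt n;
  IS      *iss;
  PetscCall(MatSubdomainsCreateCoalesce(A, 4, &n, &iss));
  // ... use the n index sets owned by this process ...
  for (PetscInt i = 0; i < n; i++) PetscCall(ISDestroy(&iss[i]));
  PetscCall(PetscFree(iss));
.ve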
11000: .seealso: [](ch_matrices), `Mat`, `IS`
11001: @*/
11002: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11003: {
11004: MPI_Comm comm, subcomm;
11005: PetscMPIInt size, rank, color;
11006: PetscInt rstart, rend, k;
11008: PetscFunctionBegin;
11009: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11010: PetscCallMPI(MPI_Comm_size(comm, &size));
11011: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11012: PetscCheck(N >= 1 && N < size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11013: *n = 1;
11014: k = size / N + (size % N > 0); /* There are up to k ranks to a color */
11015: color = rank / k;
11016: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11017: PetscCall(PetscMalloc1(1, iss));
11018: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11019: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11020: PetscCallMPI(MPI_Comm_free(&subcomm));
11021: PetscFunctionReturn(PETSC_SUCCESS);
11022: }
11024: /*@
11025: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11027: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11028: If they are not the same, uses `MatMatMatMult()`.
11030: Once the coarse grid problem is constructed, it corrects for interpolation operators
11031: that are not of full rank, which can legitimately happen in the case of non-nested
11032: geometric multigrid.
11034: Input Parameters:
11035: + restrct - restriction operator
11036: . dA - fine grid matrix
11037: . interpolate - interpolation operator
11038: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11039: - fill - expected fill, use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
11041: Output Parameter:
11042: . A - the Galerkin coarse matrix
11044: Options Database Key:
11045: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
11047: Level: developer
11049: Note:
11050: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
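  Example Usage:
  A minimal sketch for one multigrid level; `R`, `Afine`, and `P` are illustrative names, and when `R == P` the routine uses `MatPtAP()` internally:
.vb
  Mat Acoarse;
  PetscCall(MatGalerkin(R, Afine, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Acoarse));
.ve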
11052: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11053: @*/
11054: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11055: {
11056: IS zerorows;
11057: Vec diag;
11059: PetscFunctionBegin;
11060: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)dA), PETSC_ERR_SUP, "Inplace product not supported");
11061: /* Construct the coarse grid matrix */
11062: if (interpolate == restrct) {
11063: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11064: } else {
11065: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11066: }
11068: /* If the interpolation matrix is not of full rank, A will have zero rows.
11069: This can legitimately happen in the case of non-nested geometric multigrid.
11070: In that event, we set the rows of the matrix to the rows of the identity,
11071: ignoring the equations (as the RHS will also be zero). */
11073: PetscCall(MatFindZeroRows(*A, &zerorows));
11075: if (zerorows != NULL) { /* if there are any zero rows */
11076: PetscCall(MatCreateVecs(*A, &diag, NULL));
11077: PetscCall(MatGetDiagonal(*A, diag));
11078: PetscCall(VecISSet(diag, zerorows, 1.0));
11079: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11080: PetscCall(VecDestroy(&diag));
11081: PetscCall(ISDestroy(&zerorows));
11082: }
11083: PetscFunctionReturn(PETSC_SUCCESS);
11084: }
11086: /*@C
11087: MatSetOperation - Allows user to set a matrix operation for any matrix type
11089: Logically Collective
11091: Input Parameters:
11092: + mat - the matrix
11093: . op - the name of the operation
11094: - f - the function that provides the operation
11096: Level: developer
11098: Example Usage:
11099: .vb
11100: extern PetscErrorCode usermult(Mat, Vec, Vec);
11102: PetscCall(MatCreateXXX(comm, ..., &A));
11103: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFn *)usermult));
11104: .ve
11106: Notes:
11107: See the file `include/petscmat.h` for a complete list of matrix
11108: operations, which all have the form MATOP_<OPERATION>, where
11109: <OPERATION> is the name (in all capital letters) of the
11110: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11112: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11113: sequence as the usual matrix interface routines, since they
11114: are intended to be accessed via the usual matrix interface
11115: routines, e.g.,
11116: .vb
11117: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11118: .ve
11120: In particular each function MUST return `PETSC_SUCCESS` on success and
11121: nonzero on failure.
11123: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11125: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11126: @*/
11127: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, void (*f)(void))
11128: {
11129: PetscFunctionBegin;
11131: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))mat->ops->view) mat->ops->viewnative = mat->ops->view;
11132: (((void (**)(void))mat->ops)[op]) = f;
11133: PetscFunctionReturn(PETSC_SUCCESS);
11134: }
11136: /*@C
11137: MatGetOperation - Gets a matrix operation for any matrix type.
11139: Not Collective
11141: Input Parameters:
11142: + mat - the matrix
11143: - op - the name of the operation
11145: Output Parameter:
11146: . f - the function that provides the operation
11148: Level: developer
11150: Example Usage:
11151: .vb
11152: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11154: MatGetOperation(A, MATOP_MULT, (void (**)(void))&usermult);
11155: .ve
11157: Notes:
11158: See the file `include/petscmat.h` for a complete list of matrix
11159: operations, which all have the form MATOP_<OPERATION>, where
11160: <OPERATION> is the name (in all capital letters) of the
11161: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11163: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11165: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11166: @*/
11167: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, void (**f)(void))
11168: {
11169: PetscFunctionBegin;
11171: *f = (((void (**)(void))mat->ops)[op]);
11172: PetscFunctionReturn(PETSC_SUCCESS);
11173: }
11175: /*@
11176: MatHasOperation - Determines whether the given matrix supports the particular operation.
11178: Not Collective
11180: Input Parameters:
11181: + mat - the matrix
11182: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11184: Output Parameter:
11185: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11187: Level: advanced
11189: Note:
11190: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
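  Example Usage:
  A minimal sketch querying for a transpose multiply before relying on it:
.vb
  PetscBool has;
  PetscCall(MatHasOperation(A, MATOP_MULT_TRANSPOSE, &has));
  if (!has) {
    // fall back, for example by forming an explicit transpose with MatTranspose()
  }
.ve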
11192: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11193: @*/
11194: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11195: {
11196: PetscFunctionBegin;
11198: PetscAssertPointer(has, 3);
11199: if (mat->ops->hasoperation) {
11200: PetscUseTypeMethod(mat, hasoperation, op, has);
11201: } else {
11202: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11203: else {
11204: *has = PETSC_FALSE;
11205: if (op == MATOP_CREATE_SUBMATRIX) {
11206: PetscMPIInt size;
11208: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11209: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11210: }
11211: }
11212: }
11213: PetscFunctionReturn(PETSC_SUCCESS);
11214: }
11216: /*@
11217: MatHasCongruentLayouts - Determines whether the row and column layouts of the matrix are congruent
11219: Collective
11221: Input Parameter:
11222: . mat - the matrix
11224: Output Parameter:
11225: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11227: Level: beginner
11229: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11230: @*/
11231: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11232: {
11233: PetscFunctionBegin;
11236: PetscAssertPointer(cong, 2);
11237: if (!mat->rmap || !mat->cmap) {
11238: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11239: PetscFunctionReturn(PETSC_SUCCESS);
11240: }
11241: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11242: PetscCall(PetscLayoutSetUp(mat->rmap));
11243: PetscCall(PetscLayoutSetUp(mat->cmap));
11244: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11245: if (*cong) mat->congruentlayouts = 1;
11246: else mat->congruentlayouts = 0;
11247: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11248: PetscFunctionReturn(PETSC_SUCCESS);
11249: }
11251: PetscErrorCode MatSetInf(Mat A)
11252: {
11253: PetscFunctionBegin;
11254: PetscUseTypeMethod(A, setinf);
11255: PetscFunctionReturn(PETSC_SUCCESS);
11256: }
11258: /*@
11259: MatCreateGraph - Creates a scalar matrix (that is, a matrix with one vertex for each block vertex of the original matrix) for use in graph algorithms,
11260: possibly removing small values from the graph structure.
11262: Collective
11264: Input Parameters:
11265: + A - the matrix
11266: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11267: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11268: . filter - filter value: < 0 does nothing; == 0 removes only 0.0 entries; otherwise removes entries with abs(entry) <= value
11269: . num_idx - size of 'index' array
11270: - index - array of block indices to use for graph strength of connection weight
11272: Output Parameter:
11273: . graph - the resulting graph
11275: Level: advanced
11277: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11278: @*/
11279: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
11280: {
11281: PetscFunctionBegin;
11285: PetscAssertPointer(graph, 7);
11286: PetscCall(PetscLogEventBegin(MAT_CreateGraph, A, 0, 0, 0));
11287: PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
11288: PetscCall(PetscLogEventEnd(MAT_CreateGraph, A, 0, 0, 0));
11289: PetscFunctionReturn(PETSC_SUCCESS);
11290: }
11292: /*@
11293: MatEliminateZeros - Eliminates the nondiagonal zero entries from the nonzero structure of a sparse `Mat` in place,
11294: meaning the same memory is used for the matrix, and no new memory is allocated.
11296: Collective
11298: Input Parameters:
11299: + A - the matrix
11300: - keep - if the diagonal coefficient of a given row of `A` is zero, indicates whether it should be left in the structure or eliminated as well
11302: Level: intermediate
11304: Developer Note:
11305: The entries in the sparse matrix data structure are shifted to fill in the unneeded locations in the data. Thus the ends
11306: of the arrays in the data structure are left unused.
11308: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11309: @*/
11310: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11311: {
11312: PetscFunctionBegin;
11314: PetscUseTypeMethod(A, eliminatezeros, keep);
11315: PetscFunctionReturn(PETSC_SUCCESS);
11316: }