Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_CreateGraph;
40: PetscLogEvent MAT_SetValuesBatch;
41: PetscLogEvent MAT_ViennaCLCopyToGPU;
42: PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
43: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
44: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
45: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
46: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
47: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
49: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
51: /*@
52: MatSetRandom - Sets all components of a matrix to random numbers.
54: Logically Collective
56: Input Parameters:
57: + x - the matrix
58: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL`, in which case
59: one will be created internally.
61: Example:
62: .vb
63: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
64: MatSetRandom(x,rctx);
65: PetscRandomDestroy(&rctx);
66: .ve
68: Level: intermediate
70: Notes:
71: For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations;
73: for sparse matrices that already have nonzero locations, it fills those locations with random numbers.
75: It generates an error if used on unassembled sparse matrices that have not been preallocated.
77: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
78: @*/
79: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
80: {
81: PetscRandom randObj = NULL;
83: PetscFunctionBegin;
87: MatCheckPreallocated(x, 1);
89: if (!rctx) {
90: MPI_Comm comm;
91: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
92: PetscCall(PetscRandomCreate(comm, &randObj));
93: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
94: PetscCall(PetscRandomSetFromOptions(randObj));
95: rctx = randObj;
96: }
97: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
98: PetscUseTypeMethod(x, setrandom, rctx);
99: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
101: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
103: PetscCall(PetscRandomDestroy(&randObj));
104: PetscFunctionReturn(PETSC_SUCCESS);
105: }
107: /*@
108: MatCopyHashToXAIJ - copy hash table entries into an XAIJ matrix type
110: Logically Collective
112: Input Parameter:
113: . A - A matrix in unassembled, hash table form
115: Output Parameter:
116: . B - The XAIJ matrix. This can either be `A` or some matrix of equivalent size, e.g. obtained from `A` via `MatDuplicate()`
118: Example:
119: .vb
120: PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
121: PetscCall(MatCopyHashToXAIJ(A, B));
122: .ve
124: Level: advanced
126: Notes:
127: If `B` is `A`, then the hash table data structure will be destroyed. `B` is assembled on return.
129: .seealso: [](ch_matrices), `Mat`, `MAT_USE_HASH_TABLE`
130: @*/
131: PetscErrorCode MatCopyHashToXAIJ(Mat A, Mat B)
132: {
133: PetscFunctionBegin;
135: PetscUseTypeMethod(A, copyhashtoxaij, B);
136: PetscFunctionReturn(PETSC_SUCCESS);
137: }
139: /*@
140: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
142: Logically Collective
144: Input Parameter:
145: . mat - the factored matrix
147: Output Parameters:
148: + pivot - the pivot value computed
149: - row - the row in which the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
150: share the matrix
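  Example:
  A minimal sketch of checking for a zero pivot, where `fact` stands for a factored matrix obtained with `MatGetFactor()` and a numeric factorization routine:
.vb
  MatFactorError err;
  PetscReal      pivot;
  PetscInt       row;

  MatFactorGetError(fact, &err);
  if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) MatFactorGetErrorZeroPivot(fact, &pivot, &row);
.ve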
152: Level: advanced
154: Notes:
155: This routine does not work for factorizations done with external packages.
157: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
159: This can also be called on non-factored matrices, for example matrices used in SOR.
161: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
162: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
163: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
164: @*/
165: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
166: {
167: PetscFunctionBegin;
169: PetscAssertPointer(pivot, 2);
170: PetscAssertPointer(row, 3);
171: *pivot = mat->factorerror_zeropivot_value;
172: *row = mat->factorerror_zeropivot_row;
173: PetscFunctionReturn(PETSC_SUCCESS);
174: }
176: /*@
177: MatFactorGetError - gets the error code from a factorization
179: Logically Collective
181: Input Parameter:
182: . mat - the factored matrix
184: Output Parameter:
185: . err - the error code
187: Level: advanced
189: Note:
190: This can also be called on non-factored matrices, for example matrices used in SOR.
192: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
193: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
194: @*/
195: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
196: {
197: PetscFunctionBegin;
199: PetscAssertPointer(err, 2);
200: *err = mat->factorerrortype;
201: PetscFunctionReturn(PETSC_SUCCESS);
202: }
204: /*@
205: MatFactorClearError - clears the error code in a factorization
207: Logically Collective
209: Input Parameter:
210: . mat - the factored matrix
212: Level: developer
214: Note:
215: This can also be called on non-factored matrices, for example matrices used in SOR.
217: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
218: `MatGetErrorCode()`, `MatFactorError`
219: @*/
220: PetscErrorCode MatFactorClearError(Mat mat)
221: {
222: PetscFunctionBegin;
224: mat->factorerrortype = MAT_FACTOR_NOERROR;
225: mat->factorerror_zeropivot_value = 0.0;
226: mat->factorerror_zeropivot_row = 0;
227: PetscFunctionReturn(PETSC_SUCCESS);
228: }
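/*
  MatFindNonzeroRowsOrCols_Basic - default implementation used when the matrix type provides no specialized routine.
  It applies the matrix (or its transpose when cols is PETSC_TRUE) to a random vector and collects the global indices
  of the rows (or columns) whose result is nonzero, or larger than tol in absolute value, into an IS. If every row
  (column) turns out to be nonzero, *nonzero is set to NULL.
*/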
230: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
231: {
232: Vec r, l;
233: const PetscScalar *al;
234: PetscInt i, nz, gnz, N, n, st;
236: PetscFunctionBegin;
237: PetscCall(MatCreateVecs(mat, &r, &l));
238: if (!cols) { /* nonzero rows */
239: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
240: PetscCall(MatGetSize(mat, &N, NULL));
241: PetscCall(MatGetLocalSize(mat, &n, NULL));
242: PetscCall(VecSet(l, 0.0));
243: PetscCall(VecSetRandom(r, NULL));
244: PetscCall(MatMult(mat, r, l));
245: PetscCall(VecGetArrayRead(l, &al));
246: } else { /* nonzero columns */
247: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
248: PetscCall(MatGetSize(mat, NULL, &N));
249: PetscCall(MatGetLocalSize(mat, NULL, &n));
250: PetscCall(VecSet(r, 0.0));
251: PetscCall(VecSetRandom(l, NULL));
252: PetscCall(MatMultTranspose(mat, l, r));
253: PetscCall(VecGetArrayRead(r, &al));
254: }
255: if (tol <= 0.0) {
256: for (i = 0, nz = 0; i < n; i++)
257: if (al[i] != 0.0) nz++;
258: } else {
259: for (i = 0, nz = 0; i < n; i++)
260: if (PetscAbsScalar(al[i]) > tol) nz++;
261: }
262: PetscCallMPI(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
263: if (gnz != N) {
264: PetscInt *nzr;
265: PetscCall(PetscMalloc1(nz, &nzr));
266: if (nz) {
267: if (tol < 0) {
268: for (i = 0, nz = 0; i < n; i++)
269: if (al[i] != 0.0) nzr[nz++] = i + st;
270: } else {
271: for (i = 0, nz = 0; i < n; i++)
272: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
273: }
274: }
275: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
276: } else *nonzero = NULL;
277: if (!cols) { /* nonzero rows */
278: PetscCall(VecRestoreArrayRead(l, &al));
279: } else {
280: PetscCall(VecRestoreArrayRead(r, &al));
281: }
282: PetscCall(VecDestroy(&l));
283: PetscCall(VecDestroy(&r));
284: PetscFunctionReturn(PETSC_SUCCESS);
285: }
287: /*@
288: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
290: Input Parameter:
291: . mat - the matrix
293: Output Parameter:
294: . keptrows - the rows that are not completely zero
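  Example:
  A minimal sketch, where `A` stands for an assembled matrix:
.vb
  IS keptrows;

  MatFindNonzeroRows(A, &keptrows);
  if (keptrows) { /* NULL means no row is completely zero */
    ISView(keptrows, PETSC_VIEWER_STDOUT_WORLD);
    ISDestroy(&keptrows);
  }
.ve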
296: Level: intermediate
298: Note:
299: `keptrows` is set to `NULL` if all rows are nonzero.
301: Developer Note:
302: If `keptrows` is not `NULL`, it must be sorted.
304: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
305: @*/
306: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
307: {
308: PetscFunctionBegin;
311: PetscAssertPointer(keptrows, 2);
312: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
313: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
314: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
315: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
316: if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
317: PetscFunctionReturn(PETSC_SUCCESS);
318: }
320: /*@
321: MatFindZeroRows - Locate all rows that are completely zero in the matrix
323: Input Parameter:
324: . mat - the matrix
326: Output Parameter:
327: . zerorows - the rows that are completely zero
329: Level: intermediate
331: Note:
332: `zerorows` is set to `NULL` if no rows are zero.
334: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
335: @*/
336: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
337: {
338: IS keptrows;
339: PetscInt m, n;
341: PetscFunctionBegin;
344: PetscAssertPointer(zerorows, 2);
345: PetscCall(MatFindNonzeroRows(mat, &keptrows));
346: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
347: In keeping with this convention, we set zerorows to NULL if there are no zero
348: rows. */
349: if (keptrows == NULL) {
350: *zerorows = NULL;
351: } else {
352: PetscCall(MatGetOwnershipRange(mat, &m, &n));
353: PetscCall(ISComplement(keptrows, m, n, zerorows));
354: PetscCall(ISDestroy(&keptrows));
355: }
356: PetscFunctionReturn(PETSC_SUCCESS);
357: }
359: /*@
360: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
362: Not Collective
364: Input Parameter:
365: . A - the matrix
367: Output Parameter:
368: . a - the diagonal part (which is a SEQUENTIAL matrix)
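  Example:
  A minimal sketch, where `A` stands for a parallel matrix; since the reference count is not incremented, the caller must not destroy the returned matrix:
.vb
  Mat Ad;

  MatGetDiagonalBlock(A, &Ad);
  /* use Ad, for example MatView(Ad, PETSC_VIEWER_STDOUT_SELF); do not call MatDestroy() on it */
.ve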
370: Level: advanced
372: Notes:
373: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
375: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
377: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
378: @*/
379: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
380: {
381: PetscFunctionBegin;
384: PetscAssertPointer(a, 2);
385: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
386: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
387: else {
388: PetscMPIInt size;
390: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
391: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
392: *a = A;
393: }
394: PetscFunctionReturn(PETSC_SUCCESS);
395: }
397: /*@
398: MatGetTrace - Gets the trace of a matrix, that is, the sum of the diagonal entries.
400: Collective
402: Input Parameter:
403: . mat - the matrix
405: Output Parameter:
406: . trace - the sum of the diagonal entries
408: Level: advanced
410: .seealso: [](ch_matrices), `Mat`
411: @*/
412: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
413: {
414: Vec diag;
416: PetscFunctionBegin;
418: PetscAssertPointer(trace, 2);
419: PetscCall(MatCreateVecs(mat, &diag, NULL));
420: PetscCall(MatGetDiagonal(mat, diag));
421: PetscCall(VecSum(diag, trace));
422: PetscCall(VecDestroy(&diag));
423: PetscFunctionReturn(PETSC_SUCCESS);
424: }
426: /*@
427: MatRealPart - Zeros out the imaginary part of the matrix
429: Logically Collective
431: Input Parameter:
432: . mat - the matrix
434: Level: advanced
436: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
437: @*/
438: PetscErrorCode MatRealPart(Mat mat)
439: {
440: PetscFunctionBegin;
443: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
444: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
445: MatCheckPreallocated(mat, 1);
446: PetscUseTypeMethod(mat, realpart);
447: PetscFunctionReturn(PETSC_SUCCESS);
448: }
450: /*@C
451: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
453: Collective
455: Input Parameter:
456: . mat - the matrix
458: Output Parameters:
459: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
460: - ghosts - the global indices of the ghost points
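  Example:
  A sketch of creating a compatible ghosted vector from the ghost information, where `mat` stands for an assembled parallel matrix:
.vb
  PetscInt        nghosts, n;
  const PetscInt *ghosts;
  Vec             v;

  MatGetGhosts(mat, &nghosts, &ghosts);
  MatGetLocalSize(mat, NULL, &n);
  VecCreateGhost(PetscObjectComm((PetscObject)mat), n, PETSC_DETERMINE, nghosts, ghosts, &v);
.ve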
462: Level: advanced
464: Note:
465: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
467: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
468: @*/
469: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
470: {
471: PetscFunctionBegin;
474: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
475: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
476: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
477: else {
478: if (nghosts) *nghosts = 0;
479: if (ghosts) *ghosts = NULL;
480: }
481: PetscFunctionReturn(PETSC_SUCCESS);
482: }
484: /*@
485: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
487: Logically Collective
489: Input Parameter:
490: . mat - the matrix
492: Level: advanced
494: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
495: @*/
496: PetscErrorCode MatImaginaryPart(Mat mat)
497: {
498: PetscFunctionBegin;
501: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
502: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
503: MatCheckPreallocated(mat, 1);
504: PetscUseTypeMethod(mat, imaginarypart);
505: PetscFunctionReturn(PETSC_SUCCESS);
506: }
508: /*@
509: MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure
511: Not Collective
513: Input Parameter:
514: . mat - the matrix
516: Output Parameters:
517: + missing - is any diagonal entry missing
518: - dd - the first missing diagonal entry on this process (optional)
520: Level: advanced
522: Note:
523: This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value
525: .seealso: [](ch_matrices), `Mat`
526: @*/
527: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
528: {
529: PetscFunctionBegin;
532: PetscAssertPointer(missing, 2);
533: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
534: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
535: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
536: PetscFunctionReturn(PETSC_SUCCESS);
537: }
539: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
540: /*@C
541: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
542: for each row that you get to ensure that your application does
543: not bleed memory.
545: Not Collective
547: Input Parameters:
548: + mat - the matrix
549: - row - the row to get
551: Output Parameters:
552: + ncols - if not `NULL`, the number of nonzeros in `row`
553: . cols - if not `NULL`, the column numbers
554: - vals - if not `NULL`, the numerical values
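  Example:
  A sketch of looping over the locally owned rows, where `mat` stands for an assembled matrix and the variable names are illustrative:
.vb
  PetscInt           rstart, rend, ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;

  MatGetOwnershipRange(mat, &rstart, &rend);
  for (PetscInt row = rstart; row < rend; row++) {
    MatGetRow(mat, row, &ncols, &cols, &vals);
    /* examine the ncols entries in cols[] and vals[] */
    MatRestoreRow(mat, row, &ncols, &cols, &vals);
  }
.ve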
556: Level: advanced
558: Notes:
559: This routine is provided for people who need to have direct access
560: to the structure of a matrix. We hope that we provide enough
561: high-level matrix routines that few users will need it.
563: `MatGetRow()` always returns 0-based column indices, regardless of
564: whether the internal representation is 0-based (default) or 1-based.
566: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
567: not wish to extract these quantities.
569: The user can only examine the values extracted with `MatGetRow()`;
570: the values CANNOT be altered. To change the matrix entries, one
571: must use `MatSetValues()`.
573: You can only have one call to `MatGetRow()` outstanding for a particular
574: matrix at a time, per processor. `MatGetRow()` can only obtain rows
575: associated with the given processor, it cannot get rows from the
576: other processors; for that we suggest using `MatCreateSubMatrices()`, then
577: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
578: is in the global number of rows.
580: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
582: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
584: Fortran Note:
585: .vb
586: PetscInt, pointer :: cols(:)
587: PetscScalar, pointer :: vals(:)
588: .ve
590: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
591: @*/
592: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
593: {
594: PetscInt incols;
596: PetscFunctionBegin;
599: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
600: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
601: MatCheckPreallocated(mat, 1);
602: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
603: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
604: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
605: if (ncols) *ncols = incols;
606: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
607: PetscFunctionReturn(PETSC_SUCCESS);
608: }
610: /*@
611: MatConjugate - replaces the matrix values with their complex conjugates
613: Logically Collective
615: Input Parameter:
616: . mat - the matrix
618: Level: advanced
620: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
621: @*/
622: PetscErrorCode MatConjugate(Mat mat)
623: {
624: PetscFunctionBegin;
626: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
627: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
628: PetscUseTypeMethod(mat, conjugate);
629: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
630: }
631: PetscFunctionReturn(PETSC_SUCCESS);
632: }
634: /*@C
635: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
637: Not Collective
639: Input Parameters:
640: + mat - the matrix
641: . row - the row to get
642: . ncols - the number of nonzeros
643: . cols - the columns of the nonzeros
644: - vals - if not `NULL`, the numerical values
646: Level: advanced
648: Notes:
649: This routine should be called after you have finished examining the entries.
651: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
652: use of the array after it has been restored. If you pass `NULL`, it will
653: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
655: Fortran Note:
656: .vb
657: PetscInt, pointer :: cols(:)
658: PetscScalar, pointer :: vals(:)
659: .ve
661: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
662: @*/
663: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
664: {
665: PetscFunctionBegin;
667: if (ncols) PetscAssertPointer(ncols, 3);
668: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
669: PetscTryTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
670: if (ncols) *ncols = 0;
671: if (cols) *cols = NULL;
672: if (vals) *vals = NULL;
673: PetscFunctionReturn(PETSC_SUCCESS);
674: }
676: /*@
677: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
678: You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.
680: Not Collective
682: Input Parameter:
683: . mat - the matrix
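  Example:
  A sketch of the intended calling sequence for a `MATSBAIJ` matrix; `mat`, `row`, `ncols`, `cols`, and `vals` are illustrative:
.vb
  MatGetRowUpperTriangular(mat);
  MatGetRow(mat, row, &ncols, &cols, &vals);
  /* only the upper triangular part of the row is provided */
  MatRestoreRow(mat, row, &ncols, &cols, &vals);
  MatRestoreRowUpperTriangular(mat);
.ve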
685: Level: advanced
687: Note:
688: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.
690: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
691: @*/
692: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
693: {
694: PetscFunctionBegin;
697: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
698: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
699: MatCheckPreallocated(mat, 1);
700: PetscTryTypeMethod(mat, getrowuppertriangular);
701: PetscFunctionReturn(PETSC_SUCCESS);
702: }
704: /*@
705: MatRestoreRowUpperTriangular - Disable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
707: Not Collective
709: Input Parameter:
710: . mat - the matrix
712: Level: advanced
714: Note:
715: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
717: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
718: @*/
719: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
720: {
721: PetscFunctionBegin;
724: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
725: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
726: MatCheckPreallocated(mat, 1);
727: PetscTryTypeMethod(mat, restorerowuppertriangular);
728: PetscFunctionReturn(PETSC_SUCCESS);
729: }
731: /*@
732: MatSetOptionsPrefix - Sets the prefix used for searching for all
733: `Mat` options in the database.
735: Logically Collective
737: Input Parameters:
738: + A - the matrix
739: - prefix - the prefix to prepend to all option names
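  Example:
  A sketch of using a prefix to control a particular matrix from the options database; the prefix "sys1_" is illustrative:
.vb
  MatSetOptionsPrefix(A, "sys1_");
  MatSetFromOptions(A);
.ve
  The matrix then responds to prefixed options such as -sys1_mat_type and -sys1_mat_view.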
741: Level: advanced
743: Notes:
744: A hyphen (-) must NOT be given at the beginning of the prefix name.
745: The first character of all runtime options is AUTOMATICALLY the hyphen.
747: This is NOT used for options for the factorization of the matrix. Normally the
748: prefix is automatically passed in from the `PC` calling the factorization. To set
749: it directly use `MatSetOptionsPrefixFactor()`.
751: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
752: @*/
753: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
754: {
755: PetscFunctionBegin;
757: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
758: PetscFunctionReturn(PETSC_SUCCESS);
759: }
761: /*@
762: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database
763: for matrices created with `MatGetFactor()`
765: Logically Collective
767: Input Parameters:
768: + A - the matrix
769: - prefix - the prefix to prepend to all option names for the factored matrix
771: Level: developer
773: Notes:
774: A hyphen (-) must NOT be given at the beginning of the prefix name.
775: The first character of all runtime options is AUTOMATICALLY the hyphen.
777: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
778: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`.
780: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
781: @*/
782: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
783: {
784: PetscFunctionBegin;
786: if (prefix) {
787: PetscAssertPointer(prefix, 2);
788: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
789: if (prefix != A->factorprefix) {
790: PetscCall(PetscFree(A->factorprefix));
791: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
792: }
793: } else PetscCall(PetscFree(A->factorprefix));
794: PetscFunctionReturn(PETSC_SUCCESS);
795: }
797: /*@
798: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database
799: for matrices created with `MatGetFactor()`
801: Logically Collective
803: Input Parameters:
804: + A - the matrix
805: - prefix - the prefix to prepend to all option names for the factored matrix
807: Level: developer
809: Notes:
810: A hyphen (-) must NOT be given at the beginning of the prefix name.
811: The first character of all runtime options is AUTOMATICALLY the hyphen.
813: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
814: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
816: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
817: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
818: `MatSetOptionsPrefix()`
819: @*/
820: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
821: {
822: size_t len1, len2, new_len;
824: PetscFunctionBegin;
826: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
827: if (!A->factorprefix) {
828: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
829: PetscFunctionReturn(PETSC_SUCCESS);
830: }
831: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
833: PetscCall(PetscStrlen(A->factorprefix, &len1));
834: PetscCall(PetscStrlen(prefix, &len2));
835: new_len = len1 + len2 + 1;
836: PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
837: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
838: PetscFunctionReturn(PETSC_SUCCESS);
839: }
841: /*@
842: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
843: matrix options in the database.
845: Logically Collective
847: Input Parameters:
848: + A - the matrix
849: - prefix - the prefix to prepend to all option names
851: Level: advanced
853: Note:
854: A hyphen (-) must NOT be given at the beginning of the prefix name.
855: The first character of all runtime options is AUTOMATICALLY the hyphen.
857: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
858: @*/
859: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
860: {
861: PetscFunctionBegin;
863: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
864: PetscFunctionReturn(PETSC_SUCCESS);
865: }
867: /*@
868: MatGetOptionsPrefix - Gets the prefix used for searching for all
869: matrix options in the database.
871: Not Collective
873: Input Parameter:
874: . A - the matrix
876: Output Parameter:
877: . prefix - pointer to the prefix string used
879: Level: advanced
881: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
882: @*/
883: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
884: {
885: PetscFunctionBegin;
887: PetscAssertPointer(prefix, 2);
888: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
889: PetscFunctionReturn(PETSC_SUCCESS);
890: }
892: /*@
893: MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`
895: Not Collective
897: Input Parameter:
898: . A - the matrix
900: Output Parameter:
901: . state - the object state
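  Example:
  A sketch of caching the state to detect later changes to a matrix `A`; the variable names are illustrative:
.vb
  PetscObjectState saved, current;

  MatGetState(A, &saved);
  /* ... code that may modify A ... */
  MatGetState(A, &current);
  if (current != saved) {
    /* the matrix has changed since the state was saved */
  }
.ve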
903: Level: advanced
905: Note:
906: Object state is an integer which gets increased every time
907: the object is changed. By saving and later querying the object state
908: one can determine whether information about the object is still current.
910: See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.
912: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
913: @*/
914: PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
915: {
916: PetscFunctionBegin;
918: PetscAssertPointer(state, 2);
919: PetscCall(PetscObjectStateGet((PetscObject)A, state));
920: PetscFunctionReturn(PETSC_SUCCESS);
921: }
923: /*@
924: MatResetPreallocation - Reset matrix to use the original preallocation values provided by the user, for example with `MatXAIJSetPreallocation()`
926: Collective
928: Input Parameter:
929: . A - the matrix
931: Level: beginner
933: Notes:
934: After calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY` the matrix data structures represent the nonzeros assigned to the
935: matrix. If fewer nonzeros were assigned than were preallocated, the extra preallocated space is no longer available to take on new values. `MatResetPreallocation()`
936: makes all of the preallocation space available again.
938: Current values in the matrix are lost in this call
940: Currently only supported for `MATAIJ` matrices.
942: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
943: @*/
944: PetscErrorCode MatResetPreallocation(Mat A)
945: {
946: PetscFunctionBegin;
949: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
950: PetscFunctionReturn(PETSC_SUCCESS);
951: }
953: /*@
954: MatResetHash - Reset the matrix so that it will use a hash table for the next round of `MatSetValues()` and `MatAssemblyBegin()`/`MatAssemblyEnd()`.
956: Collective
958: Input Parameter:
959: . A - the matrix
961: Level: intermediate
963: Notes:
964: The matrix will again delete its hash table data structures after subsequent calls to `MatAssemblyBegin()`/`MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
966: Currently only supported for `MATAIJ` matrices.
968: .seealso: [](ch_matrices), `Mat`, `MatResetPreallocation()`
969: @*/
970: PetscErrorCode MatResetHash(Mat A)
971: {
972: PetscFunctionBegin;
975: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset to hash state after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
976: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
977: PetscUseMethod(A, "MatResetHash_C", (Mat), (A));
978: /* These flags are used to determine whether certain setups occur */
979: A->was_assembled = PETSC_FALSE;
980: A->assembled = PETSC_FALSE;
981: /* Log that the state of this object has changed; this will help guarantee that preconditioners get re-setup */
982: PetscCall(PetscObjectStateIncrease((PetscObject)A));
983: PetscFunctionReturn(PETSC_SUCCESS);
984: }
986: /*@
987: MatSetUp - Sets up the internal matrix data structures for later use by the matrix
989: Collective
991: Input Parameter:
992: . A - the matrix
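  Example:
  A minimal sketch of creating a matrix without explicit preallocation, where `N` is an illustrative global size:
.vb
  Mat A;

  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, N, N);
  MatSetFromOptions(A);
  MatSetUp(A);
  /* MatSetValues() may now be called */
.ve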
994: Level: advanced
996: Notes:
997: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
998: setting values in the matrix.
1000: This routine is called internally by other `Mat` functions when needed, so it rarely needs to be called by users.
1002: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
1003: @*/
1004: PetscErrorCode MatSetUp(Mat A)
1005: {
1006: PetscFunctionBegin;
1008: if (!((PetscObject)A)->type_name) {
1009: PetscMPIInt size;
1011: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1012: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
1013: }
1014: if (!A->preallocated) PetscTryTypeMethod(A, setup);
1015: PetscCall(PetscLayoutSetUp(A->rmap));
1016: PetscCall(PetscLayoutSetUp(A->cmap));
1017: A->preallocated = PETSC_TRUE;
1018: PetscFunctionReturn(PETSC_SUCCESS);
1019: }
1021: #if defined(PETSC_HAVE_SAWS)
1022: #include <petscviewersaws.h>
1023: #endif
1025: /*
1026: If threadsafety is on, extraneous matrices may be printed.
1028: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
1029: */
1030: #if !defined(PETSC_HAVE_THREADSAFETY)
1031: static PetscInt insidematview = 0;
1032: #endif
1034: /*@
1035: MatViewFromOptions - View properties of the matrix based on options set in the options database
1037: Collective
1039: Input Parameters:
1040: + A - the matrix
1041: . obj - optional additional object that provides the options prefix to use
1042: - name - command line option
1044: Options Database Key:
1045: . -mat_view [viewertype]:... - the viewer and its options
1047: Level: intermediate
1049: Note:
1050: .vb
1051: If no value is provided ascii:stdout is used
1052: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
1053: for example ascii::ascii_info prints just the information about the object not all details
1054: unless :append is given filename opens in write mode, overwriting what was already there
1055: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
1056: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
1057: socket[:port] defaults to the standard output port
1058: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
1059: .ve
1061: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1062: @*/
1063: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
1064: {
1065: PetscFunctionBegin;
1067: #if !defined(PETSC_HAVE_THREADSAFETY)
1068: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
1069: #endif
1070: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
1071: PetscFunctionReturn(PETSC_SUCCESS);
1072: }
1074: /*@
1075: MatView - display information about a matrix in a variety of ways
1077: Collective on viewer
1079: Input Parameters:
1080: + mat - the matrix
1081: - viewer - visualization context
1083: Options Database Keys:
1084: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1085: . -mat_view ::ascii_info_detail - Prints more detailed info
1086: . -mat_view - Prints matrix in ASCII format
1087: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1088: . -mat_view draw - Draws the nonzero structure of the matrix, using `MatView()` and `PetscDrawOpenX()`.
1089: . -display <name> - Sets display name (default is host)
1090: . -draw_pause <sec> - Sets number of seconds to pause after display
1091: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1092: . -viewer_socket_machine <machine> - -
1093: . -viewer_socket_port <port> - -
1094: . -mat_view binary - save matrix to file in binary format
1095: - -viewer_binary_filename <name> - -
1097: Level: beginner
1099: Notes:
1100: The available visualization contexts include
1101: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1102: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1103: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1104: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1106: The user can open alternative visualization contexts with
1107: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1108: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a specified file; corresponding input uses `MatLoad()`
1109: . `PetscViewerDrawOpen()` - Outputs nonzero matrix nonzero structure to an X window display
1110: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer, `PETSCVIEWERSOCKET`. Only the `MATSEQDENSE` and `MATAIJ` types support this viewer.
1112: The user can call `PetscViewerPushFormat()` to specify the output
1113: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1114: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1115: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1116: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1117: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1118: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse format common among all matrix types
1119: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific format (which is in many cases the same as the default)
1120: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix size and structure (not the matrix entries)
1121: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about the matrix nonzero structure (still not vector or matrix entries)
1123: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1124: for larger matrices the program will seemingly hang and take hours, so one should use the binary format instead.
1126: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1128: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1129: viewer is used.
1131: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1132: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1134: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1135: and then use the following mouse functions.
1136: .vb
1137: left mouse: zoom in
1138: middle mouse: zoom out
1139: right mouse: continue with the simulation
1140: .ve
1142: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1143: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1144: @*/
1145: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1146: {
1147: PetscInt rows, cols, rbs, cbs;
1148: PetscBool isascii, isstring, issaws;
1149: PetscViewerFormat format;
1150: PetscMPIInt size;
1152: PetscFunctionBegin;
1155: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1158: PetscCall(PetscViewerGetFormat(viewer, &format));
1159: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1160: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1162: #if !defined(PETSC_HAVE_THREADSAFETY)
1163: insidematview++;
1164: #endif
1165: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1166: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1167: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1168: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1170: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1171: if (isascii) {
1172: if (!mat->preallocated) {
1173: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1174: #if !defined(PETSC_HAVE_THREADSAFETY)
1175: insidematview--;
1176: #endif
1177: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1178: PetscFunctionReturn(PETSC_SUCCESS);
1179: }
1180: if (!mat->assembled) {
1181: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1182: #if !defined(PETSC_HAVE_THREADSAFETY)
1183: insidematview--;
1184: #endif
1185: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1186: PetscFunctionReturn(PETSC_SUCCESS);
1187: }
1188: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1189: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1190: MatNullSpace nullsp, transnullsp;
1192: PetscCall(PetscViewerASCIIPushTab(viewer));
1193: PetscCall(MatGetSize(mat, &rows, &cols));
1194: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1195: if (rbs != 1 || cbs != 1) {
1196: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1197: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1198: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1199: if (mat->factortype) {
1200: MatSolverType solver;
1201: PetscCall(MatFactorGetSolverType(mat, &solver));
1202: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1203: }
1204: if (mat->ops->getinfo) {
1205: MatInfo info;
1206: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1207: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1208: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1209: }
1210: PetscCall(MatGetNullSpace(mat, &nullsp));
1211: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1212: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1213: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1214: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1215: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1216: PetscCall(PetscViewerASCIIPushTab(viewer));
1217: PetscCall(MatProductView(mat, viewer));
1218: PetscCall(PetscViewerASCIIPopTab(viewer));
1219: if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1220: IS tmp;
1222: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
1223: PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1224: PetscCall(PetscViewerASCIIPushTab(viewer));
1225: PetscCall(ISView(tmp, viewer));
1226: PetscCall(PetscViewerASCIIPopTab(viewer));
1227: PetscCall(ISDestroy(&tmp));
1228: }
1229: }
1230: } else if (issaws) {
1231: #if defined(PETSC_HAVE_SAWS)
1232: PetscMPIInt rank;
1234: PetscCall(PetscObjectName((PetscObject)mat));
1235: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1236: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1237: #endif
1238: } else if (isstring) {
1239: const char *type;
1240: PetscCall(MatGetType(mat, &type));
1241: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1242: PetscTryTypeMethod(mat, view, viewer);
1243: }
1244: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1245: PetscCall(PetscViewerASCIIPushTab(viewer));
1246: PetscUseTypeMethod(mat, viewnative, viewer);
1247: PetscCall(PetscViewerASCIIPopTab(viewer));
1248: } else if (mat->ops->view) {
1249: PetscCall(PetscViewerASCIIPushTab(viewer));
1250: PetscUseTypeMethod(mat, view, viewer);
1251: PetscCall(PetscViewerASCIIPopTab(viewer));
1252: }
1253: if (isascii) {
1254: PetscCall(PetscViewerGetFormat(viewer, &format));
1255: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1256: }
1257: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1258: #if !defined(PETSC_HAVE_THREADSAFETY)
1259: insidematview--;
1260: #endif
1261: PetscFunctionReturn(PETSC_SUCCESS);
1262: }
1264: #if defined(PETSC_USE_DEBUG)
1265: #include <../src/sys/totalview/tv_data_display.h>
1266: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1267: {
1268: TV_add_row("Local rows", "int", &mat->rmap->n);
1269: TV_add_row("Local columns", "int", &mat->cmap->n);
1270: TV_add_row("Global rows", "int", &mat->rmap->N);
1271: TV_add_row("Global columns", "int", &mat->cmap->N);
1272: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1273: return TV_format_OK;
1274: }
1275: #endif
1277: /*@
1278: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1279: with `MatView()`. The matrix format is determined from the options database.
1280: Generates a parallel MPI matrix if the communicator has more than one
1281: processor. The default matrix type is `MATAIJ`.
1283: Collective
1285: Input Parameters:
1286: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1287: or some related function before a call to `MatLoad()`
1288: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1290: Options Database Key:
1291: . -matload_block_size <bs> - set block size
1293: Level: beginner
1295: Notes:
1296: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1297: `Mat` before calling this routine if you wish to set it from the options database.
1299: `MatLoad()` automatically loads into the options database any options
1300: given in the file filename.info where filename is the name of the file
1301: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1302: file will be ignored if you use the -viewer_binary_skip_info option.
1304: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1305: sets the default matrix type AIJ and sets the local and global sizes.
1306: If type and/or size is already set, then the same are used.
1308: In parallel, each processor can load a subset of rows (or the
1309: entire matrix). This routine is especially useful when a large
1310: matrix is stored on disk and only part of it is desired on each
1311: processor. For example, a parallel solver may access only some of
1312: the rows from each processor. The algorithm used here reads
1313: relatively small blocks of data rather than reading the entire
1314: matrix and then subsetting it.
1316: The viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1317: Such a viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1318: or with a sequence like
1319: .vb
1320: `PetscViewer` v;
1321: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1322: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1323: `PetscViewerSetFromOptions`(v);
1324: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1325: `PetscViewerFileSetName`(v,"datafile");
1326: .ve
1327: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1328: .vb
1329: -viewer_type {binary, hdf5}
1330: .ve
1332: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1333: and src/mat/tutorials/ex10.c with the second approach.
1335: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1336: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1337: Multiple objects, both matrices and vectors, can be stored within the same file.
1338: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1340: Most users should not need to know the details of the binary storage
1341: format, since `MatLoad()` and `MatView()` completely hide these details.
1342: But for anyone who is interested, the standard binary matrix storage
1343: format is
1345: .vb
1346: PetscInt MAT_FILE_CLASSID
1347: PetscInt number of rows
1348: PetscInt number of columns
1349: PetscInt total number of nonzeros
1350: PetscInt *number nonzeros in each row
1351: PetscInt *column indices of all nonzeros (starting index is zero)
1352: PetscScalar *values of all nonzeros
1353: .ve
1354: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1355: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1356: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1358: PETSc automatically does the byte swapping for
1359: machines that store the bytes reversed. Thus if you write your own binary
1360: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1361: and `PetscBinaryWrite()` to see how this may be done.
1363: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1364: Each processor's chunk is loaded independently by its owning MPI process.
1365: Multiple objects, both matrices and vectors, can be stored within the same file.
1366: They are looked up by their PetscObject name.
1368: As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1369: by default the same structure and naming of the AIJ arrays and column count
1370: within the HDF5 file. This means that a MAT file saved with the -v7.3 flag, e.g.
1371: .vb
1372: save example.mat A b -v7.3
1373: .ve
1374: can be directly read by this routine (see Reference 1 for details).
1376: Depending on your MATLAB version, this format might be the default;
1377: otherwise you can set it as the default in Preferences.
1379: Unless the -nocompression flag is used to save the file in MATLAB,
1380: PETSc must be configured with the ZLIB package.
1382: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1384: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1386: Corresponding `MatView()` is not yet implemented.
1388: The loaded matrix is actually a transpose of the original one in MATLAB,
1389: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1390: With this format, the matrix is automatically transposed by PETSc,
1391: unless the matrix is marked as SPD or symmetric
1392: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1394: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1396: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1397: @*/
1398: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1399: {
1400: PetscBool flg;
1402: PetscFunctionBegin;
1406: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1408: flg = PETSC_FALSE;
1409: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1410: if (flg) {
1411: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1412: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1413: }
1414: flg = PETSC_FALSE;
1415: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1416: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1418: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1419: PetscUseTypeMethod(mat, load, viewer);
1420: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1421: PetscFunctionReturn(PETSC_SUCCESS);
1422: }
1424: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1425: {
1426: Mat_Redundant *redund = *redundant;
1428: PetscFunctionBegin;
1429: if (redund) {
1430: if (redund->matseq) { /* via MatCreateSubMatrices() */
1431: PetscCall(ISDestroy(&redund->isrow));
1432: PetscCall(ISDestroy(&redund->iscol));
1433: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1434: } else {
1435: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1436: PetscCall(PetscFree(redund->sbuf_j));
1437: PetscCall(PetscFree(redund->sbuf_a));
1438: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1439: PetscCall(PetscFree(redund->rbuf_j[i]));
1440: PetscCall(PetscFree(redund->rbuf_a[i]));
1441: }
1442: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1443: }
1445: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1446: PetscCall(PetscFree(redund));
1447: }
1448: PetscFunctionReturn(PETSC_SUCCESS);
1449: }
1451: /*@
1452: MatDestroy - Frees space taken by a matrix.
1454: Collective
1456: Input Parameter:
1457: . A - the matrix
1459: Level: beginner
1461: Developer Note:
1462: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1463: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1464: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1465: if changes are needed here.
1467: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1468: @*/
1469: PetscErrorCode MatDestroy(Mat *A)
1470: {
1471: PetscFunctionBegin;
1472: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1474: if (--((PetscObject)*A)->refct > 0) {
1475: *A = NULL;
1476: PetscFunctionReturn(PETSC_SUCCESS);
1477: }
1479: /* if memory was published with SAWs then destroy it */
1480: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1481: PetscTryTypeMethod(*A, destroy);
1483: PetscCall(PetscFree((*A)->factorprefix));
1484: PetscCall(PetscFree((*A)->defaultvectype));
1485: PetscCall(PetscFree((*A)->defaultrandtype));
1486: PetscCall(PetscFree((*A)->bsizes));
1487: PetscCall(PetscFree((*A)->solvertype));
1488: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1489: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1490: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1491: PetscCall(MatProductClear(*A));
1492: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1493: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1494: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1495: PetscCall(MatDestroy(&(*A)->schur));
1496: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1497: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1498: PetscCall(PetscHeaderDestroy(A));
1499: PetscFunctionReturn(PETSC_SUCCESS);
1500: }
1502: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1503: /*@
1504: MatSetValues - Inserts or adds a block of values into a matrix.
1505: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1506: MUST be called after all calls to `MatSetValues()` have been completed.
1508: Not Collective
1510: Input Parameters:
1511: + mat - the matrix
1512: . m - the number of rows
1513: . idxm - the global indices of the rows
1514: . n - the number of columns
1515: . idxn - the global indices of the columns
1516: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1517: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1518: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1520: Level: beginner
1522: Notes:
1523: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1524: options cannot be mixed without intervening calls to the assembly
1525: routines.
1527: `MatSetValues()` uses 0-based row and column numbers in Fortran
1528: as well as in C.
1530:    Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1531: simply ignored. This allows easily inserting element stiffness matrices
1532: with homogeneous Dirichlet boundary conditions that you don't want represented
1533: in the matrix.
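
   Example Usage:
   A minimal sketch that inserts a 2x2 block of illustrative values and then assembles; `A` is assumed to be a preallocated `Mat`:
.vb
   PetscInt    rows[2] = {0, 1}, cols[2] = {0, 2};
   PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0};  // row-major: (0,0) (0,2) (1,0) (1,2)

   PetscCall(MatSetValues(A, 2, rows, 2, cols, vals, INSERT_VALUES));
   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve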
1535: Efficiency Alert:
1536: The routine `MatSetValuesBlocked()` may offer much better efficiency
1537: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1539: Fortran Notes:
1540: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1541: .vb
1542: call MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
1543: .ve
1545: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1547: Developer Note:
1548: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1549: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1551: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1552: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1553: @*/
1554: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1555: {
1556: PetscFunctionBeginHot;
1559: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1560: PetscAssertPointer(idxm, 3);
1561: PetscAssertPointer(idxn, 5);
1562: MatCheckPreallocated(mat, 1);
1564: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1565: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1567: if (PetscDefined(USE_DEBUG)) {
1568: PetscInt i, j;
1570: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1571: if (v) {
1572: for (i = 0; i < m; i++) {
1573: for (j = 0; j < n; j++) {
1574: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1575: #if defined(PETSC_USE_COMPLEX)
1576: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1577: #else
1578: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1579: #endif
1580: }
1581: }
1582: }
1583: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1584: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1585: }
1587: if (mat->assembled) {
1588: mat->was_assembled = PETSC_TRUE;
1589: mat->assembled = PETSC_FALSE;
1590: }
1591: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1592: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1593: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1594: PetscFunctionReturn(PETSC_SUCCESS);
1595: }
1597: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1598: /*@
1599:   MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns.
1600: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1601: MUST be called after all calls to `MatSetValues()` have been completed.
1603: Not Collective
1605: Input Parameters:
1606: + mat - the matrix
1607: . ism - the rows to provide
1608: . isn - the columns to provide
1609: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1610: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1611: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1613: Level: beginner
1615: Notes:
1616: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1618: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1619: options cannot be mixed without intervening calls to the assembly
1620: routines.
1622: `MatSetValues()` uses 0-based row and column numbers in Fortran
1623: as well as in C.
1625:    Negative indices may be passed in `ism` and `isn`; these rows and columns are
1626: simply ignored. This allows easily inserting element stiffness matrices
1627: with homogeneous Dirichlet boundary conditions that you don't want represented
1628: in the matrix.
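
   Example Usage:
   A minimal sketch (the index sets and values are illustrative); `A` is assumed to be a preallocated `Mat`:
.vb
   IS          ism, isn;
   PetscInt    rows[2] = {0, 1}, cols[2] = {0, 1};
   PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0};

   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, rows, PETSC_COPY_VALUES, &ism));
   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, cols, PETSC_COPY_VALUES, &isn));
   PetscCall(MatSetValuesIS(A, ism, isn, vals, INSERT_VALUES));
   PetscCall(ISDestroy(&ism));
   PetscCall(ISDestroy(&isn));
.ve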
1630: Fortran Note:
1631: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1633: Efficiency Alert:
1634: The routine `MatSetValuesBlocked()` may offer much better efficiency
1635: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1637: This is currently not optimized for any particular `ISType`
1639: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1640: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1641: @*/
1642: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1643: {
1644: PetscInt m, n;
1645: const PetscInt *rows, *cols;
1647: PetscFunctionBeginHot;
1649: PetscCall(ISGetIndices(ism, &rows));
1650: PetscCall(ISGetIndices(isn, &cols));
1651: PetscCall(ISGetLocalSize(ism, &m));
1652: PetscCall(ISGetLocalSize(isn, &n));
1653: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1654: PetscCall(ISRestoreIndices(ism, &rows));
1655: PetscCall(ISRestoreIndices(isn, &cols));
1656: PetscFunctionReturn(PETSC_SUCCESS);
1657: }
1659: /*@
1660: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1661: values into a matrix
1663: Not Collective
1665: Input Parameters:
1666: + mat - the matrix
1667: . row - the (block) row to set
1668: - v - a one-dimensional array that contains the values. For `MATBAIJ` they are implicitly stored as a two-dimensional array, by default in row-major order.
1669: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1671: Level: intermediate
1673: Notes:
1674: The values, `v`, are column-oriented (for the block version) and sorted
1676: All the nonzero values in `row` must be provided
1678: The matrix must have previously had its column indices set, likely by having been assembled.
1680: `row` must belong to this MPI process
1682: Fortran Note:
1683: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1685: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1686: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1687: @*/
1688: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1689: {
1690: PetscInt globalrow;
1692: PetscFunctionBegin;
1695: PetscAssertPointer(v, 3);
1696: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1697: PetscCall(MatSetValuesRow(mat, globalrow, v));
1698: PetscFunctionReturn(PETSC_SUCCESS);
1699: }
1701: /*@
1702: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1703: values into a matrix
1705: Not Collective
1707: Input Parameters:
1708: + mat - the matrix
1709: . row - the (block) row to set
1710: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1712: Level: advanced
1714: Notes:
1715: The values, `v`, are column-oriented for the block version.
1717: All the nonzeros in `row` must be provided
1719:    THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED; usually `MatSetValues()` is used.
1721: `row` must belong to this process
1723: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1724: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1725: @*/
1726: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1727: {
1728: PetscFunctionBeginHot;
1731: MatCheckPreallocated(mat, 1);
1732: PetscAssertPointer(v, 3);
1733: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1734: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1735: mat->insertmode = INSERT_VALUES;
1737: if (mat->assembled) {
1738: mat->was_assembled = PETSC_TRUE;
1739: mat->assembled = PETSC_FALSE;
1740: }
1741: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1742: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1743: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1744: PetscFunctionReturn(PETSC_SUCCESS);
1745: }
1747: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1748: /*@
1749:   MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1750:   using structured grid indexing.
1752: Not Collective
1754: Input Parameters:
1755: + mat - the matrix
1756: . m - number of rows being entered
1757: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1758: . n - number of columns being entered
1759: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1760: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1761: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1762: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1764: Level: beginner
1766: Notes:
1767: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1769: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1770: options cannot be mixed without intervening calls to the assembly
1771: routines.
1773: The grid coordinates are across the entire grid, not just the local portion
1775: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1776: as well as in C.
1778: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1780: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1781: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1783: The columns and rows in the stencil passed in MUST be contained within the
1784: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1785: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1786: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1787: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1789:    For periodic boundary conditions, use negative indices for values to the left of index 0 (these are
1790:    obtained by wrapping around from the right edge), and indices greater than the last entry for values to the
1791:    right (obtained by wrapping around from the left edge). This only works with the
1792:    `DM_BOUNDARY_PERIODIC` boundary type.
1794:    For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you
1795:    have a single value per point) you can skip filling those indices.
1797: Inspired by the structured grid interface to the HYPRE package
1798: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
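
   Example Usage:
   A minimal sketch entering one row with a three-point stencil in the x direction, for a matrix obtained with
   `DMCreateMatrix()` from a 2d `DMDA` with one degree of freedom (the indices `i`, `j` and the values are illustrative):
.vb
   MatStencil  row, col[3];
   PetscScalar vals[3] = {-1.0, 2.0, -1.0};

   row.i = i;        row.j = j;
   col[0].i = i - 1; col[0].j = j;
   col[1].i = i;     col[1].j = j;
   col[2].i = i + 1; col[2].j = j;
   PetscCall(MatSetValuesStencil(A, 1, &row, 3, col, vals, INSERT_VALUES));
.ve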
1800: Fortran Note:
1801:    If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1803: Efficiency Alert:
1804: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1805: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1807: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1808: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1809: @*/
1810: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1811: {
1812: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1813: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1814: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1816: PetscFunctionBegin;
1817: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1820: PetscAssertPointer(idxm, 3);
1821: PetscAssertPointer(idxn, 5);
1823: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1824: jdxm = buf;
1825: jdxn = buf + m;
1826: } else {
1827: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1828: jdxm = bufm;
1829: jdxn = bufn;
1830: }
1831: for (i = 0; i < m; i++) {
1832: for (j = 0; j < 3 - sdim; j++) dxm++;
1833: tmp = *dxm++ - starts[0];
1834: for (j = 0; j < dim - 1; j++) {
1835: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1836: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1837: }
1838: if (mat->stencil.noc) dxm++;
1839: jdxm[i] = tmp;
1840: }
1841: for (i = 0; i < n; i++) {
1842: for (j = 0; j < 3 - sdim; j++) dxn++;
1843: tmp = *dxn++ - starts[0];
1844: for (j = 0; j < dim - 1; j++) {
1845: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1846: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1847: }
1848: if (mat->stencil.noc) dxn++;
1849: jdxn[i] = tmp;
1850: }
1851: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1852: PetscCall(PetscFree2(bufm, bufn));
1853: PetscFunctionReturn(PETSC_SUCCESS);
1854: }
1856: /*@
1857:   MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1858:   using structured grid indexing.
1860: Not Collective
1862: Input Parameters:
1863: + mat - the matrix
1864: . m - number of rows being entered
1865: . idxm - grid coordinates for matrix rows being entered
1866: . n - number of columns being entered
1867: . idxn - grid coordinates for matrix columns being entered
1868: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1869: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1870: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1872: Level: beginner
1874: Notes:
1875: By default the values, `v`, are row-oriented and unsorted.
1876: See `MatSetOption()` for other options.
1878: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1879: options cannot be mixed without intervening calls to the assembly
1880: routines.
1882: The grid coordinates are across the entire grid, not just the local portion
1884: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1885: as well as in C.
1887: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1889: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1890: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1892: The columns and rows in the stencil passed in MUST be contained within the
1893: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1894: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1895: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1896: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1898:    Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1899: simply ignored. This allows easily inserting element stiffness matrices
1900: with homogeneous Dirichlet boundary conditions that you don't want represented
1901: in the matrix.
1903: Inspired by the structured grid interface to the HYPRE package
1904: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1906: Fortran Notes:
1907: `idxm` and `idxn` should be declared as
1908: .vb
1909: MatStencil idxm(4,m),idxn(4,n)
1910: .ve
1911: and the values inserted using
1912: .vb
1913: idxm(MatStencil_i,1) = i
1914: idxm(MatStencil_j,1) = j
1915: idxm(MatStencil_k,1) = k
1916: etc
1917: .ve
1919: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1921: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1922: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1923: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1924: @*/
1925: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1926: {
1927: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1928: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1929: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1931: PetscFunctionBegin;
1932: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1935: PetscAssertPointer(idxm, 3);
1936: PetscAssertPointer(idxn, 5);
1937: PetscAssertPointer(v, 6);
1939: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1940: jdxm = buf;
1941: jdxn = buf + m;
1942: } else {
1943: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1944: jdxm = bufm;
1945: jdxn = bufn;
1946: }
1947: for (i = 0; i < m; i++) {
1948: for (j = 0; j < 3 - sdim; j++) dxm++;
1949: tmp = *dxm++ - starts[0];
1950: for (j = 0; j < sdim - 1; j++) {
1951: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1952: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1953: }
1954: dxm++;
1955: jdxm[i] = tmp;
1956: }
1957: for (i = 0; i < n; i++) {
1958: for (j = 0; j < 3 - sdim; j++) dxn++;
1959: tmp = *dxn++ - starts[0];
1960: for (j = 0; j < sdim - 1; j++) {
1961: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1962: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1963: }
1964: dxn++;
1965: jdxn[i] = tmp;
1966: }
1967: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1968: PetscCall(PetscFree2(bufm, bufn));
1969: PetscFunctionReturn(PETSC_SUCCESS);
1970: }
1972: /*@
1973: MatSetStencil - Sets the grid information for setting values into a matrix via
1974: `MatSetValuesStencil()`
1976: Not Collective
1978: Input Parameters:
1979: + mat - the matrix
1980: . dim - dimension of the grid 1, 2, or 3
1981: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1982: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1983: - dof - number of degrees of freedom per node
1985: Level: beginner
1987: Notes:
1988: Inspired by the structured grid interface to the HYPRE package
1989:    (www.llnl.gov/CASC/hypre)
1991: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1992: user.
1994: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1995: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1996: @*/
1997: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1998: {
1999: PetscFunctionBegin;
2001: PetscAssertPointer(dims, 3);
2002: PetscAssertPointer(starts, 4);
2004: mat->stencil.dim = dim + (dof > 1);
2005: for (PetscInt i = 0; i < dim; i++) {
2006: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
2007: mat->stencil.starts[i] = starts[dim - i - 1];
2008: }
2009: mat->stencil.dims[dim] = dof;
2010: mat->stencil.starts[dim] = 0;
2011: mat->stencil.noc = (PetscBool)(dof == 1);
2012: PetscFunctionReturn(PETSC_SUCCESS);
2013: }
2015: /*@
2016: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
2018: Not Collective
2020: Input Parameters:
2021: + mat - the matrix
2022: . m - the number of block rows
2023: . idxm - the global block indices
2024: . n - the number of block columns
2025: . idxn - the global block indices
2026: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2027: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2028: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
2030: Level: intermediate
2032: Notes:
2033: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
2034: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
2036: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
2037: NOT the total number of rows/columns; for example, if the block size is 2 and
2038: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
2039:    The values in `idxm` would be 1 2; that is, the first index of each block divided by
2040: the block size.
2042: You must call `MatSetBlockSize()` when constructing this matrix (before
2043: preallocating it).
2045: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2047: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
2048: options cannot be mixed without intervening calls to the assembly
2049: routines.
2051: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
2052: as well as in C.
2054:    Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
2055: simply ignored. This allows easily inserting element stiffness matrices
2056: with homogeneous Dirichlet boundary conditions that you don't want represented
2057: in the matrix.
2059: Each time an entry is set within a sparse matrix via `MatSetValues()`,
2060: internal searching must be done to determine where to place the
2061: data in the matrix storage space. By instead inserting blocks of
2062: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2063: reduced.
2065: Example:
2066: .vb
2067:    Suppose m=n=2 and block size (bs) = 2. The array is
2069: 1 2 | 3 4
2070: 5 6 | 7 8
2071: - - - | - - -
2072: 9 10 | 11 12
2073: 13 14 | 15 16
2075: v[] should be passed in like
2076: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
2078: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2079: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2080: .ve
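
   A call matching the example above might look like the following sketch, assuming `A` is a block matrix with block size 2
   and the values `v[]` above are being set into rows/columns 2,3,4,5 (so the block indices are 1 and 2):
.vb
   PetscInt idxm[2] = {1, 2}, idxn[2] = {1, 2};

   PetscCall(MatSetValuesBlocked(A, 2, idxm, 2, idxn, v, INSERT_VALUES));
.ve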
2082: Fortran Notes:
2083: If any of `idmx`, `idxn`, and `v` are scalars pass them using, for example,
2084: .vb
2085: call MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
2086: .ve
2088: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2090: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2091: @*/
2092: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
2093: {
2094: PetscFunctionBeginHot;
2097: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2098: PetscAssertPointer(idxm, 3);
2099: PetscAssertPointer(idxn, 5);
2100: MatCheckPreallocated(mat, 1);
2101: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2102: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2103: if (PetscDefined(USE_DEBUG)) {
2104: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2105: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2106: }
2107: if (PetscDefined(USE_DEBUG)) {
2108: PetscInt rbs, cbs, M, N, i;
2109: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2110: PetscCall(MatGetSize(mat, &M, &N));
2111: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
2112: for (i = 0; i < n; i++)
2113: PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
2114: }
2115: if (mat->assembled) {
2116: mat->was_assembled = PETSC_TRUE;
2117: mat->assembled = PETSC_FALSE;
2118: }
2119: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2120: if (mat->ops->setvaluesblocked) {
2121: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2122: } else {
2123: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2124: PetscInt i, j, bs, cbs;
2126: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2127: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2128: iidxm = buf;
2129: iidxn = buf + m * bs;
2130: } else {
2131: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2132: iidxm = bufr;
2133: iidxn = bufc;
2134: }
2135: for (i = 0; i < m; i++) {
2136: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2137: }
2138: if (m != n || bs != cbs || idxm != idxn) {
2139: for (i = 0; i < n; i++) {
2140: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2141: }
2142: } else iidxn = iidxm;
2143: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2144: PetscCall(PetscFree2(bufr, bufc));
2145: }
2146: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2147: PetscFunctionReturn(PETSC_SUCCESS);
2148: }
2150: /*@
2151: MatGetValues - Gets a block of local values from a matrix.
2153:   Not Collective; can only return values that are owned by the given process
2155: Input Parameters:
2156: + mat - the matrix
2157: . v - a logically two-dimensional array for storing the values
2158: . m - the number of rows
2159: . idxm - the global indices of the rows
2160: . n - the number of columns
2161: - idxn - the global indices of the columns
2163: Level: advanced
2165: Notes:
2166: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2167: The values, `v`, are then returned in a row-oriented format,
2168: analogous to that used by default in `MatSetValues()`.
2170: `MatGetValues()` uses 0-based row and column numbers in
2171: Fortran as well as in C.
2173: `MatGetValues()` requires that the matrix has been assembled
2174: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2175: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2176: without intermediate matrix assembly.
2178: Negative row or column indices will be ignored and those locations in `v` will be
2179: left unchanged.
2181: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2182: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2183: from `MatGetOwnershipRange`(mat,&rstart,&rend).
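
   Example Usage:
   A minimal sketch retrieving a 2x2 block of values (the indices are illustrative and must refer to rows owned by the calling process):
.vb
   PetscInt    rows[2] = {0, 1}, cols[2] = {0, 1};
   PetscScalar vals[4];

   PetscCall(MatGetValues(A, 2, rows, 2, cols, vals));  // vals is filled in row-major order
.ve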
2185: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2186: @*/
2187: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2188: {
2189: PetscFunctionBegin;
2192: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2193: PetscAssertPointer(idxm, 3);
2194: PetscAssertPointer(idxn, 5);
2195: PetscAssertPointer(v, 6);
2196: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2197: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2198: MatCheckPreallocated(mat, 1);
2200: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2201: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2202: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2203: PetscFunctionReturn(PETSC_SUCCESS);
2204: }
2206: /*@
2207: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2208: defined previously by `MatSetLocalToGlobalMapping()`
2210: Not Collective
2212: Input Parameters:
2213: + mat - the matrix
2214: . nrow - number of rows
2215: . irow - the row local indices
2216: . ncol - number of columns
2217: - icol - the column local indices
2219: Output Parameter:
2220: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2221: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2223: Level: advanced
2225: Notes:
2226: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2228: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2229: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2230: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2231: with `MatSetLocalToGlobalMapping()`.
2233: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2234: `MatSetValuesLocal()`, `MatGetValues()`
2235: @*/
2236: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2237: {
2238: PetscFunctionBeginHot;
2241: MatCheckPreallocated(mat, 1);
2242: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2243: PetscAssertPointer(irow, 3);
2244: PetscAssertPointer(icol, 5);
2245: if (PetscDefined(USE_DEBUG)) {
2246: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2247: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2248: }
2249: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2250: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2251: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2252: else {
2253: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2254: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2255: irowm = buf;
2256: icolm = buf + nrow;
2257: } else {
2258: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2259: irowm = bufr;
2260: icolm = bufc;
2261: }
2262: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2263: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2264: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2265: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2266: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2267: PetscCall(PetscFree2(bufr, bufc));
2268: }
2269: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2270: PetscFunctionReturn(PETSC_SUCCESS);
2271: }
2273: /*@
2274: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2275: the same size. Currently, this can only be called once and creates the given matrix.
2277: Not Collective
2279: Input Parameters:
2280: + mat - the matrix
2281: . nb - the number of blocks
2282: . bs - the number of rows (and columns) in each block
2283: . rows - a concatenation of the rows for each block
2284: - v - a concatenation of logically two-dimensional arrays of values
2286: Level: advanced
2288: Notes:
2289: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2291: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
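
   Example Usage:
   A minimal sketch adding two square 2x2 blocks (the rows and values are illustrative):
.vb
   PetscInt    rows[4] = {0, 1, 2, 3};                 // rows of the first block, then of the second
   PetscScalar v[8]    = {1, 2, 3, 4, 5, 6, 7, 8};     // values of the first block, then of the second

   PetscCall(MatSetValuesBatch(A, 2, 2, rows, v));
.ve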
2293: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2294: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2295: @*/
2296: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2297: {
2298: PetscFunctionBegin;
2301: PetscAssertPointer(rows, 4);
2302: PetscAssertPointer(v, 5);
2303: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2305: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2306: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2307: else {
2308: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2309: }
2310: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2311: PetscFunctionReturn(PETSC_SUCCESS);
2312: }
2314: /*@
2315: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2316: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2317: using a local (per-processor) numbering.
2319: Not Collective
2321: Input Parameters:
2322: + x - the matrix
2323: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2324: - cmapping - column mapping
2326: Level: intermediate
2328: Note:
2329: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
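
   Example Usage:
   A minimal sketch in which three local indices are mapped to global indices and the same mapping is used for rows and columns
   (the global indices are illustrative):
.vb
   ISLocalToGlobalMapping ltog;
   PetscInt               globals[3] = {10, 11, 12};  // global index of local indices 0, 1, 2

   PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, 3, globals, PETSC_COPY_VALUES, &ltog));
   PetscCall(MatSetLocalToGlobalMapping(A, ltog, ltog));
   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
.ve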
2331: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2332: @*/
2333: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2334: {
2335: PetscFunctionBegin;
2340: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2341: else {
2342: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2343: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2344: }
2345: PetscFunctionReturn(PETSC_SUCCESS);
2346: }
2348: /*@
2349: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2351: Not Collective
2353: Input Parameter:
2354: . A - the matrix
2356: Output Parameters:
2357: + rmapping - row mapping
2358: - cmapping - column mapping
2360: Level: advanced
2362: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2363: @*/
2364: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2365: {
2366: PetscFunctionBegin;
2369: if (rmapping) {
2370: PetscAssertPointer(rmapping, 2);
2371: *rmapping = A->rmap->mapping;
2372: }
2373: if (cmapping) {
2374: PetscAssertPointer(cmapping, 3);
2375: *cmapping = A->cmap->mapping;
2376: }
2377: PetscFunctionReturn(PETSC_SUCCESS);
2378: }
2380: /*@
2381: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2383: Logically Collective
2385: Input Parameters:
2386: + A - the matrix
2387: . rmap - row layout
2388: - cmap - column layout
2390: Level: advanced
2392: Note:
2393: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2395: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2396: @*/
2397: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2398: {
2399: PetscFunctionBegin;
2401: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2402: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2403: PetscFunctionReturn(PETSC_SUCCESS);
2404: }
2406: /*@
2407: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2409: Not Collective
2411: Input Parameter:
2412: . A - the matrix
2414: Output Parameters:
2415: + rmap - row layout
2416: - cmap - column layout
2418: Level: advanced
2420: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2421: @*/
2422: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2423: {
2424: PetscFunctionBegin;
2427: if (rmap) {
2428: PetscAssertPointer(rmap, 2);
2429: *rmap = A->rmap;
2430: }
2431: if (cmap) {
2432: PetscAssertPointer(cmap, 3);
2433: *cmap = A->cmap;
2434: }
2435: PetscFunctionReturn(PETSC_SUCCESS);
2436: }
2438: /*@
2439: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2440: using a local numbering of the rows and columns.
2442: Not Collective
2444: Input Parameters:
2445: + mat - the matrix
2446: . nrow - number of rows
2447: . irow - the row local indices
2448: . ncol - number of columns
2449: . icol - the column local indices
2450: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2451: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2452: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2454: Level: intermediate
2456: Notes:
2457: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2459: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2460: options cannot be mixed without intervening calls to the assembly
2461: routines.
2463: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2464: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
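
   Example Usage:
   A minimal sketch; `A` is assumed to be preallocated and to have had `MatSetLocalToGlobalMapping()` called on it,
   and the local indices and value are illustrative:
.vb
   PetscInt    irow[1] = {0}, icol[1] = {0};
   PetscScalar val[1]  = {1.0};

   PetscCall(MatSetValuesLocal(A, 1, irow, 1, icol, val, ADD_VALUES));
   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve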
2466: Fortran Notes:
2467: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2468: .vb
2469: call MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2470: .ve
2472: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2474: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2475: `MatGetValuesLocal()`
2476: @*/
2477: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2478: {
2479: PetscFunctionBeginHot;
2482: MatCheckPreallocated(mat, 1);
2483: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2484: PetscAssertPointer(irow, 3);
2485: PetscAssertPointer(icol, 5);
2486: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2487: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2488: if (PetscDefined(USE_DEBUG)) {
2489: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2490: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2491: }
2493: if (mat->assembled) {
2494: mat->was_assembled = PETSC_TRUE;
2495: mat->assembled = PETSC_FALSE;
2496: }
2497: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2498: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2499: else {
2500: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2501: const PetscInt *irowm, *icolm;
2503: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2504: bufr = buf;
2505: bufc = buf + nrow;
2506: irowm = bufr;
2507: icolm = bufc;
2508: } else {
2509: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2510: irowm = bufr;
2511: icolm = bufc;
2512: }
2513: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2514: else irowm = irow;
2515: if (mat->cmap->mapping) {
2516: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2517: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2518: } else icolm = irowm;
2519: } else icolm = icol;
2520: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2521: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2522: }
2523: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2524: PetscFunctionReturn(PETSC_SUCCESS);
2525: }
2527: /*@
2528: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2529: using a local ordering of the nodes a block at a time.
2531: Not Collective
2533: Input Parameters:
2534: + mat - the matrix
2535: . nrow - number of rows
2536: . irow - the row local indices
2537: . ncol - number of columns
2538: . icol - the column local indices
2539: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2540: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2541: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2543: Level: intermediate
2545: Notes:
2546:    If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2547:    before using this routine.
2549: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2550: options cannot be mixed without intervening calls to the assembly
2551: routines.
2553: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2554: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2556: Fortran Notes:
2557: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2558: .vb
2559: call MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2560: .ve
2562: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2564: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2565: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2566: @*/
2567: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2568: {
2569: PetscFunctionBeginHot;
2572: MatCheckPreallocated(mat, 1);
2573: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2574: PetscAssertPointer(irow, 3);
2575: PetscAssertPointer(icol, 5);
2576: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2577: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2578: if (PetscDefined(USE_DEBUG)) {
2579: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2580: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2581: }
2583: if (mat->assembled) {
2584: mat->was_assembled = PETSC_TRUE;
2585: mat->assembled = PETSC_FALSE;
2586: }
2587: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2588: PetscInt irbs, rbs;
2589: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2590: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2591: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2592: }
2593: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2594: PetscInt icbs, cbs;
2595: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2596: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2597: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2598: }
2599: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2600: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2601: else {
2602: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2603: const PetscInt *irowm, *icolm;
2605: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2606: bufr = buf;
2607: bufc = buf + nrow;
2608: irowm = bufr;
2609: icolm = bufc;
2610: } else {
2611: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2612: irowm = bufr;
2613: icolm = bufc;
2614: }
2615: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2616: else irowm = irow;
2617: if (mat->cmap->mapping) {
2618: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2619: PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2620: } else icolm = irowm;
2621: } else icolm = icol;
2622: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2623: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2624: }
2625: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2626: PetscFunctionReturn(PETSC_SUCCESS);
2627: }
2629: /*@
2630:   MatMultDiagonalBlock - Computes the matrix-vector product $y = Dx$, where `D` is defined by the inode or block structure of the diagonal
2632: Collective
2634: Input Parameters:
2635: + mat - the matrix
2636: - x - the vector to be multiplied
2638: Output Parameter:
2639: . y - the result
2641: Level: developer
2643: Note:
2644: The vectors `x` and `y` cannot be the same. I.e., one cannot
2645: call `MatMultDiagonalBlock`(A,y,y).
2647: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2648: @*/
2649: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2650: {
2651: PetscFunctionBegin;
2657: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2658: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2659: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2660: MatCheckPreallocated(mat, 1);
2662: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2663: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2664: PetscFunctionReturn(PETSC_SUCCESS);
2665: }
2667: /*@
2668: MatMult - Computes the matrix-vector product, $y = Ax$.
2670: Neighbor-wise Collective
2672: Input Parameters:
2673: + mat - the matrix
2674: - x - the vector to be multiplied
2676: Output Parameter:
2677: . y - the result
2679: Level: beginner
2681: Note:
2682: The vectors `x` and `y` cannot be the same. I.e., one cannot
2683: call `MatMult`(A,y,y).
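
   Example Usage:
   A minimal sketch; the vectors are created with layouts compatible with `mat`:
.vb
   Vec x, y;

   PetscCall(MatCreateVecs(mat, &x, &y));  // x matches the columns, y matches the rows
   PetscCall(VecSet(x, 1.0));
   PetscCall(MatMult(mat, x, y));
.ve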
2685: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2686: @*/
2687: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2688: {
2689: PetscFunctionBegin;
2693: VecCheckAssembled(x);
2695: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2696: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2697: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2698: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2699: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2700: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2701: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2702: PetscCall(VecSetErrorIfLocked(y, 3));
2703: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2704: MatCheckPreallocated(mat, 1);
2706: PetscCall(VecLockReadPush(x));
2707: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2708: PetscUseTypeMethod(mat, mult, x, y);
2709: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2710: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2711: PetscCall(VecLockReadPop(x));
2712: PetscFunctionReturn(PETSC_SUCCESS);
2713: }
2715: /*@
2716: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2718: Neighbor-wise Collective
2720: Input Parameters:
2721: + mat - the matrix
2722: - x - the vector to be multiplied
2724: Output Parameter:
2725: . y - the result
2727: Level: beginner
2729: Notes:
2730: The vectors `x` and `y` cannot be the same. I.e., one cannot
2731: call `MatMultTranspose`(A,y,y).
2733:    For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2734:    use `MatMultHermitianTranspose()` for that.
2736: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2737: @*/
2738: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2739: {
2740: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2742: PetscFunctionBegin;
2746: VecCheckAssembled(x);
2749: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2750: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2751: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2752: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2753: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2754: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2755: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2756: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2757: MatCheckPreallocated(mat, 1);
2759: if (!mat->ops->multtranspose) {
2760: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2761: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2762: } else op = mat->ops->multtranspose;
2763: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2764: PetscCall(VecLockReadPush(x));
2765: PetscCall((*op)(mat, x, y));
2766: PetscCall(VecLockReadPop(x));
2767: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2768: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2769: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2770: PetscFunctionReturn(PETSC_SUCCESS);
2771: }
2773: /*@
2774: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2776: Neighbor-wise Collective
2778: Input Parameters:
2779: + mat - the matrix
2780: - x - the vector to be multiplied
2782: Output Parameter:
2783: . y - the result
2785: Level: beginner
2787: Notes:
2788: The vectors `x` and `y` cannot be the same. I.e., one cannot
2789: call `MatMultHermitianTranspose`(A,y,y).
2791: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2793: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
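   Example Usage:
   A minimal sketch of computing $y = A^H x$ (here `A`, `x`, and `y` are assumed to be an already assembled matrix and two compatibly sized vectors, as in `MatMultTranspose()`):
.vb
   MatMultHermitianTranspose(A, x, y); /* y = conj(A)^T x; same as MatMultTranspose() for real scalars */
.ve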
2795: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2796: @*/
2797: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2798: {
2799: PetscFunctionBegin;
2805: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2806: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2807: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2808: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2809: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2810: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2811: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2812: MatCheckPreallocated(mat, 1);
2814: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2815: #if defined(PETSC_USE_COMPLEX)
2816: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2817: PetscCall(VecLockReadPush(x));
2818: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2819: else PetscUseTypeMethod(mat, mult, x, y);
2820: PetscCall(VecLockReadPop(x));
2821: } else {
2822: Vec w;
2823: PetscCall(VecDuplicate(x, &w));
2824: PetscCall(VecCopy(x, w));
2825: PetscCall(VecConjugate(w));
2826: PetscCall(MatMultTranspose(mat, w, y));
2827: PetscCall(VecDestroy(&w));
2828: PetscCall(VecConjugate(y));
2829: }
2830: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2831: #else
2832: PetscCall(MatMultTranspose(mat, x, y));
2833: #endif
2834: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2835: PetscFunctionReturn(PETSC_SUCCESS);
2836: }
2838: /*@
2839: MatMultAdd - Computes $v3 = v2 + A * v1$.
2841: Neighbor-wise Collective
2843: Input Parameters:
2844: + mat - the matrix
2845: . v1 - the vector to be multiplied by `mat`
2846: - v2 - the vector to be added to the result
2848: Output Parameter:
2849: . v3 - the result
2851: Level: beginner
2853: Note:
2854: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2855: call `MatMultAdd`(A,v1,v2,v1).
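   Example Usage:
   A minimal sketch of the update $v3 = v2 + A v1$ (the objects `A`, `v1`, `v2`, and `v3` are assumed to already exist with compatible layouts, with `v1` different from `v3`):
.vb
   MatMultAdd(A, v1, v2, v3);
.ve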
2857: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2858: @*/
2859: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2860: {
2861: PetscFunctionBegin;
2868: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2869: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2870: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2871: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2872: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2873: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2874: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2875: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2876: MatCheckPreallocated(mat, 1);
2878: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2879: PetscCall(VecLockReadPush(v1));
2880: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2881: PetscCall(VecLockReadPop(v1));
2882: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2883: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2884: PetscFunctionReturn(PETSC_SUCCESS);
2885: }
2887: /*@
2888: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2890: Neighbor-wise Collective
2892: Input Parameters:
2893: + mat - the matrix
2894: . v1 - the vector to be multiplied by the transpose of the matrix
2895: - v2 - the vector to be added to the result
2897: Output Parameter:
2898: . v3 - the result
2900: Level: beginner
2902: Note:
2903: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2904: call `MatMultTransposeAdd`(A,v1,v2,v1).
2906: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2907: @*/
2908: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2909: {
2910: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2912: PetscFunctionBegin;
2919: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2920: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2921: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2922: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2923: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2924: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2925: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2926: MatCheckPreallocated(mat, 1);
2928: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2929: PetscCall(VecLockReadPush(v1));
2930: PetscCall((*op)(mat, v1, v2, v3));
2931: PetscCall(VecLockReadPop(v1));
2932: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2933: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2934: PetscFunctionReturn(PETSC_SUCCESS);
2935: }
2937: /*@
2938: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2940: Neighbor-wise Collective
2942: Input Parameters:
2943: + mat - the matrix
2944: . v1 - the vector to be multiplied by the Hermitian transpose
2945: - v2 - the vector to be added to the result
2947: Output Parameter:
2948: . v3 - the result
2950: Level: beginner
2952: Note:
2953: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2954: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2956: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2957: @*/
2958: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2959: {
2960: PetscFunctionBegin;
2967: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2968: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2969: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2970: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2971: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2972: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2973: MatCheckPreallocated(mat, 1);
2975: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2976: PetscCall(VecLockReadPush(v1));
2977: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2978: else {
2979: Vec w, z;
2980: PetscCall(VecDuplicate(v1, &w));
2981: PetscCall(VecCopy(v1, w));
2982: PetscCall(VecConjugate(w));
2983: PetscCall(VecDuplicate(v3, &z));
2984: PetscCall(MatMultTranspose(mat, w, z));
2985: PetscCall(VecDestroy(&w));
2986: PetscCall(VecConjugate(z));
2987: if (v2 != v3) {
2988: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2989: } else {
2990: PetscCall(VecAXPY(v3, 1.0, z));
2991: }
2992: PetscCall(VecDestroy(&z));
2993: }
2994: PetscCall(VecLockReadPop(v1));
2995: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2996: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2997: PetscFunctionReturn(PETSC_SUCCESS);
2998: }
3000: /*@
3001: MatGetFactorType - gets the type of factorization of a matrix, that is, whether it is a factored matrix and of what kind
3003: Not Collective
3005: Input Parameter:
3006: . mat - the matrix
3008: Output Parameter:
3009: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3011: Level: intermediate
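   Example Usage:
   A small sketch that checks whether a matrix is a factored matrix (`F` is a placeholder for any `Mat`):
.vb
   MatFactorType ftype;
   MatGetFactorType(F, &ftype);
   if (ftype == MAT_FACTOR_NONE) {
     /* F is a regular (unfactored) matrix */
   }
.ve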
3013: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3014: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3015: @*/
3016: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
3017: {
3018: PetscFunctionBegin;
3021: PetscAssertPointer(t, 2);
3022: *t = mat->factortype;
3023: PetscFunctionReturn(PETSC_SUCCESS);
3024: }
3026: /*@
3027: MatSetFactorType - sets the type of factorization of a matrix
3029: Logically Collective
3031: Input Parameters:
3032: + mat - the matrix
3033: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3035: Level: intermediate
3037: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3038: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3039: @*/
3040: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
3041: {
3042: PetscFunctionBegin;
3045: mat->factortype = t;
3046: PetscFunctionReturn(PETSC_SUCCESS);
3047: }
3049: /*@
3050: MatGetInfo - Returns information about matrix storage (number of
3051: nonzeros, memory, etc.).
3053: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3055: Input Parameters:
3056: + mat - the matrix
3057: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3059: Output Parameter:
3060: . info - matrix information context
3062: Options Database Key:
3063: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3065: Level: intermediate
3067: Notes:
3068: The `MatInfo` context contains a variety of matrix data, including
3069: number of nonzeros allocated and used, number of mallocs during
3070: matrix assembly, etc. Additional information for factored matrices
3071: is provided (such as the fill ratio, number of mallocs during
3072: factorization, etc.).
3074: Example:
3075: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3076: data within the `MatInfo` context. For example,
3077: .vb
3078: MatInfo info;
3079: Mat A;
3080: double mal, nz_a, nz_u;
3082: MatGetInfo(A, MAT_LOCAL, &info);
3083: mal = info.mallocs;
3084: nz_a = info.nz_allocated;
      nz_u = info.nz_used;
3085: .ve
3087: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3088: @*/
3089: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3090: {
3091: PetscFunctionBegin;
3094: PetscAssertPointer(info, 3);
3095: MatCheckPreallocated(mat, 1);
3096: PetscUseTypeMethod(mat, getinfo, flag, info);
3097: PetscFunctionReturn(PETSC_SUCCESS);
3098: }
3100: /*
3101: This is used by external packages where it is not easy to get the info from the actual
3102: matrix factorization.
3103: */
3104: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3105: {
3106: PetscFunctionBegin;
3107: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3108: PetscFunctionReturn(PETSC_SUCCESS);
3109: }
3111: /*@
3112: MatLUFactor - Performs in-place LU factorization of a matrix.
3114: Collective
3116: Input Parameters:
3117: + mat - the matrix
3118: . row - row permutation
3119: . col - column permutation
3120: - info - options for factorization, includes
3121: .vb
3122: fill - expected fill as ratio of original fill.
3123: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3124: Run with the option -info to determine an optimal value to use
3125: .ve
3127: Level: developer
3129: Notes:
3130: Most users should employ the `KSP` interface for linear solvers
3131: instead of working directly with matrix algebra routines such as this.
3132: See, e.g., `KSPCreate()`.
3134: This changes the state of the matrix to a factored matrix; it cannot be used
3135: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3137: This is really in-place only for dense matrices, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3138: when not using `KSP`.
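   Example Usage:
   A minimal sketch of an in-place LU solve (assuming `A` is an assembled square matrix of a type that supports `MatLUFactor()`, and `b`, `x` are compatibly sized vectors; the ordering type is just an illustrative choice):
.vb
   IS rperm, cperm;
   MatGetOrdering(A, MATORDERINGND, &rperm, &cperm);
   MatLUFactor(A, rperm, cperm, NULL); /* A now holds its own LU factors */
   MatSolve(A, b, x);
   ISDestroy(&rperm);
   ISDestroy(&cperm);
.ve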
3140: Fortran Note:
3141: A valid (non-null) `info` argument must be provided
3143: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3144: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3145: @*/
3146: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3147: {
3148: MatFactorInfo tinfo;
3150: PetscFunctionBegin;
3154: if (info) PetscAssertPointer(info, 4);
3156: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3157: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3158: MatCheckPreallocated(mat, 1);
3159: if (!info) {
3160: PetscCall(MatFactorInfoInitialize(&tinfo));
3161: info = &tinfo;
3162: }
3164: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3165: PetscUseTypeMethod(mat, lufactor, row, col, info);
3166: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3167: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3168: PetscFunctionReturn(PETSC_SUCCESS);
3169: }
3171: /*@
3172: MatILUFactor - Performs in-place ILU factorization of a matrix.
3174: Collective
3176: Input Parameters:
3177: + mat - the matrix
3178: . row - row permutation
3179: . col - column permutation
3180: - info - structure containing
3181: .vb
3182: levels - number of levels of fill.
3183: expected fill - as ratio of original fill.
3184: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3185: missing diagonal entries)
3186: .ve
3188: Level: developer
3190: Notes:
3191: Most users should employ the `KSP` interface for linear solvers
3192: instead of working directly with matrix algebra routines such as this.
3193: See, e.g., `KSPCreate()`.
3195: Probably really in-place only when level of fill is zero, otherwise allocates
3196: new space to store factored matrix and deletes previous memory. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3197: when not using `KSP`.
3199: Fortran Note:
3200: A valid (non-null) `info` argument must be provided
3202: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3203: @*/
3204: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3205: {
3206: PetscFunctionBegin;
3210: PetscAssertPointer(info, 4);
3212: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3213: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3214: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3215: MatCheckPreallocated(mat, 1);
3217: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3218: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3219: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3220: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3221: PetscFunctionReturn(PETSC_SUCCESS);
3222: }
3224: /*@
3225: MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
3226: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3228: Collective
3230: Input Parameters:
3231: + fact - the factor matrix obtained with `MatGetFactor()`
3232: . mat - the matrix
3233: . row - the row permutation
3234: . col - the column permutation
3235: - info - options for factorization, includes
3236: .vb
3237: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3238: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3239: .ve
3241: Level: developer
3243: Notes:
3244: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3246: Most users should employ the simplified `KSP` interface for linear solvers
3247: instead of working directly with matrix algebra routines such as this.
3248: See, e.g., `KSPCreate()`.
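   Example Usage:
   A sketch of the full out-of-place workflow this routine is part of (assuming `A` is an assembled `MATAIJ` matrix and `b`, `x` are compatible vectors; the solver package and ordering are illustrative choices):
.vb
   Mat           F;
   IS            rperm, cperm;
   MatFactorInfo info;
   MatFactorInfoInitialize(&info);
   MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F);
   MatGetOrdering(A, MATORDERINGND, &rperm, &cperm);
   MatLUFactorSymbolic(F, A, rperm, cperm, &info);
   MatLUFactorNumeric(F, A, &info);
   MatSolve(F, b, x);
   ISDestroy(&rperm);
   ISDestroy(&cperm);
   MatDestroy(&F);
.ve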
3250: Fortran Note:
3251: A valid (non-null) `info` argument must be provided
3253: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3254: @*/
3255: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3256: {
3257: MatFactorInfo tinfo;
3259: PetscFunctionBegin;
3264: if (info) PetscAssertPointer(info, 5);
3267: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3268: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3269: MatCheckPreallocated(mat, 2);
3270: if (!info) {
3271: PetscCall(MatFactorInfoInitialize(&tinfo));
3272: info = &tinfo;
3273: }
3275: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3276: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3277: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3278: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3279: PetscFunctionReturn(PETSC_SUCCESS);
3280: }
3282: /*@
3283: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3284: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3286: Collective
3288: Input Parameters:
3289: + fact - the factor matrix obtained with `MatGetFactor()`
3290: . mat - the matrix
3291: - info - options for factorization
3293: Level: developer
3295: Notes:
3296: See `MatLUFactor()` for in-place factorization. See
3297: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3299: Most users should employ the `KSP` interface for linear solvers
3300: instead of working directly with matrix algebra routines such as this.
3301: See, e.g., `KSPCreate()`.
3303: Fortran Note:
3304: A valid (non-null) `info` argument must be provided
3306: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3307: @*/
3308: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3309: {
3310: MatFactorInfo tinfo;
3312: PetscFunctionBegin;
3317: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3318: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3319: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3321: MatCheckPreallocated(mat, 2);
3322: if (!info) {
3323: PetscCall(MatFactorInfoInitialize(&tinfo));
3324: info = &tinfo;
3325: }
3327: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3328: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3329: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3330: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3331: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3332: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3333: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3334: PetscFunctionReturn(PETSC_SUCCESS);
3335: }
3337: /*@
3338: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3339: symmetric matrix.
3341: Collective
3343: Input Parameters:
3344: + mat - the matrix
3345: . perm - row and column permutations
3346: - info - expected fill as ratio of original fill
3348: Level: developer
3350: Notes:
3351: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3352: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3354: Most users should employ the `KSP` interface for linear solvers
3355: instead of working directly with matrix algebra routines such as this.
3356: See, e.g., `KSPCreate()`.
3358: Fortran Note:
3359: A valid (non-null) `info` argument must be provided
3361: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`
3362: `MatGetOrdering()`
3363: @*/
3364: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3365: {
3366: MatFactorInfo tinfo;
3368: PetscFunctionBegin;
3371: if (info) PetscAssertPointer(info, 3);
3373: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3374: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3375: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3376: MatCheckPreallocated(mat, 1);
3377: if (!info) {
3378: PetscCall(MatFactorInfoInitialize(&tinfo));
3379: info = &tinfo;
3380: }
3382: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3383: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3384: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3385: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3386: PetscFunctionReturn(PETSC_SUCCESS);
3387: }
3389: /*@
3390: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3391: of a symmetric matrix.
3393: Collective
3395: Input Parameters:
3396: + fact - the factor matrix obtained with `MatGetFactor()`
3397: . mat - the matrix
3398: . perm - row and column permutations
3399: - info - options for factorization, includes
3400: .vb
3401: fill - expected fill as ratio of original fill.
3402: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3403: Run with the option -info to determine an optimal value to use
3404: .ve
3406: Level: developer
3408: Notes:
3409: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3410: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3412: Most users should employ the `KSP` interface for linear solvers
3413: instead of working directly with matrix algebra routines such as this.
3414: See, e.g., `KSPCreate()`.
3416: Fortran Note:
3417: A valid (non-null) `info` argument must be provided
3419: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`
3420: `MatGetOrdering()`
3421: @*/
3422: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3423: {
3424: MatFactorInfo tinfo;
3426: PetscFunctionBegin;
3430: if (info) PetscAssertPointer(info, 4);
3433: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3434: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3435: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3436: MatCheckPreallocated(mat, 2);
3437: if (!info) {
3438: PetscCall(MatFactorInfoInitialize(&tinfo));
3439: info = &tinfo;
3440: }
3442: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3443: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3444: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3445: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3446: PetscFunctionReturn(PETSC_SUCCESS);
3447: }
3449: /*@
3450: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3451: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3452: `MatCholeskyFactorSymbolic()`.
3454: Collective
3456: Input Parameters:
3457: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3458: . mat - the initial matrix that is to be factored
3459: - info - options for factorization
3461: Level: developer
3463: Note:
3464: Most users should employ the `KSP` interface for linear solvers
3465: instead of working directly with matrix algebra routines such as this.
3466: See, e.g., `KSPCreate()`.
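   Example Usage:
   A sketch of the Cholesky analogue of the out-of-place LU workflow (assuming `A` is an assembled symmetric positive definite matrix of a supported type and `b`, `x` are compatible vectors; the solver package and ordering are illustrative choices):
.vb
   Mat           F;
   IS            rperm, cperm;
   MatFactorInfo info;
   MatFactorInfoInitialize(&info);
   MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F);
   MatGetOrdering(A, MATORDERINGNATURAL, &rperm, &cperm);
   MatCholeskyFactorSymbolic(F, A, rperm, &info);
   MatCholeskyFactorNumeric(F, A, &info);
   MatSolve(F, b, x);
   ISDestroy(&rperm);
   ISDestroy(&cperm);
   MatDestroy(&F);
.ve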
3468: Fortran Note:
3469: A valid (non-null) `info` argument must be provided
3471: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3472: @*/
3473: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3474: {
3475: MatFactorInfo tinfo;
3477: PetscFunctionBegin;
3482: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3483: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3484: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3485: MatCheckPreallocated(mat, 2);
3486: if (!info) {
3487: PetscCall(MatFactorInfoInitialize(&tinfo));
3488: info = &tinfo;
3489: }
3491: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3492: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3493: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3494: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3495: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3496: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3497: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3498: PetscFunctionReturn(PETSC_SUCCESS);
3499: }
3501: /*@
3502: MatQRFactor - Performs in-place QR factorization of a matrix.
3504: Collective
3506: Input Parameters:
3507: + mat - the matrix
3508: . col - column permutation
3509: - info - options for factorization, includes
3510: .vb
3511: fill - expected fill as ratio of original fill.
3512: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3513: Run with the option -info to determine an optimal value to use
3514: .ve
3516: Level: developer
3518: Notes:
3519: Most users should employ the `KSP` interface for linear solvers
3520: instead of working directly with matrix algebra routines such as this.
3521: See, e.g., `KSPCreate()`.
3523: This changes the state of the matrix to a factored matrix; it cannot be used
3524: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3526: Fortran Note:
3527: A valid (non-null) `info` argument must be provided
3529: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3530: `MatSetUnfactored()`
3531: @*/
3532: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3533: {
3534: PetscFunctionBegin;
3537: if (info) PetscAssertPointer(info, 3);
3539: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3540: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3541: MatCheckPreallocated(mat, 1);
3542: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3543: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3544: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3545: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3546: PetscFunctionReturn(PETSC_SUCCESS);
3547: }
3549: /*@
3550: MatQRFactorSymbolic - Performs symbolic QR factorization of a matrix.
3551: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3553: Collective
3555: Input Parameters:
3556: + fact - the factor matrix obtained with `MatGetFactor()`
3557: . mat - the matrix
3558: . col - column permutation
3559: - info - options for factorization, includes
3560: .vb
3561: fill - expected fill as ratio of original fill.
3562: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3563: Run with the option -info to determine an optimal value to use
3564: .ve
3566: Level: developer
3568: Note:
3569: Most users should employ the `KSP` interface for linear solvers
3570: instead of working directly with matrix algebra routines such as this.
3571: See, e.g., `KSPCreate()`.
3573: Fortran Note:
3574: A valid (non-null) `info` argument must be provided
3576: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3577: @*/
3578: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3579: {
3580: MatFactorInfo tinfo;
3582: PetscFunctionBegin;
3586: if (info) PetscAssertPointer(info, 4);
3589: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3590: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3591: MatCheckPreallocated(mat, 2);
3592: if (!info) {
3593: PetscCall(MatFactorInfoInitialize(&tinfo));
3594: info = &tinfo;
3595: }
3597: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3598: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3599: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3600: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3601: PetscFunctionReturn(PETSC_SUCCESS);
3602: }
3604: /*@
3605: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3606: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3608: Collective
3610: Input Parameters:
3611: + fact - the factor matrix obtained with `MatGetFactor()`
3612: . mat - the matrix
3613: - info - options for factorization
3615: Level: developer
3617: Notes:
3618: See `MatQRFactor()` for in-place factorization.
3620: Most users should employ the `KSP` interface for linear solvers
3621: instead of working directly with matrix algebra routines such as this.
3622: See, e.g., `KSPCreate()`.
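   Example Usage:
   A sketch of a QR factor-and-solve (assuming `A` is an assembled matrix of a type whose factor package supports QR, for example `MATSEQDENSE` with `MATSOLVERPETSC`, `b` and `x` are compatible vectors, and no column permutation is requested):
.vb
   Mat           F;
   MatFactorInfo info;
   MatFactorInfoInitialize(&info);
   MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_QR, &F);
   MatQRFactorSymbolic(F, A, NULL, &info);
   MatQRFactorNumeric(F, A, &info);
   MatSolve(F, b, x);
   MatDestroy(&F);
.ve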
3624: Fortran Note:
3625: A valid (non-null) `info` argument must be provided
3627: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3628: @*/
3629: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3630: {
3631: MatFactorInfo tinfo;
3633: PetscFunctionBegin;
3638: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3639: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3640: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3642: MatCheckPreallocated(mat, 2);
3643: if (!info) {
3644: PetscCall(MatFactorInfoInitialize(&tinfo));
3645: info = &tinfo;
3646: }
3648: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3649: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3650: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3651: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3652: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3653: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3654: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3655: PetscFunctionReturn(PETSC_SUCCESS);
3656: }
3658: /*@
3659: MatSolve - Solves $A x = b$, given a factored matrix.
3661: Neighbor-wise Collective
3663: Input Parameters:
3664: + mat - the factored matrix
3665: - b - the right-hand-side vector
3667: Output Parameter:
3668: . x - the result vector
3670: Level: developer
3672: Notes:
3673: The vectors `b` and `x` cannot be the same. I.e., one cannot
3674: call `MatSolve`(A,x,x).
3676: Most users should employ the `KSP` interface for linear solvers
3677: instead of working directly with matrix algebra routines such as this.
3678: See, e.g., `KSPCreate()`.
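   Example Usage:
   A minimal sketch (assuming `F` is a factored matrix obtained with `MatGetFactor()` followed by the symbolic and numeric factorization of a matrix `A`; `A` is only used here to create compatibly sized vectors):
.vb
   Vec b, x;
   MatCreateVecs(A, &x, &b); /* x has the column layout of A, b the row layout */
   /* ... fill b ... */
   MatSolve(F, b, x);
.ve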
3680: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3681: @*/
3682: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3683: {
3684: PetscFunctionBegin;
3689: PetscCheckSameComm(mat, 1, b, 2);
3690: PetscCheckSameComm(mat, 1, x, 3);
3691: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3692: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3693: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3694: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3695: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3696: MatCheckPreallocated(mat, 1);
3698: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3699: PetscCall(VecFlag(x, mat->factorerrortype));
3700: if (mat->factorerrortype) {
3701: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3702: } else PetscUseTypeMethod(mat, solve, b, x);
3703: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3704: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3705: PetscFunctionReturn(PETSC_SUCCESS);
3706: }
3708: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3709: {
3710: Vec b, x;
3711: PetscInt N, i;
3712: PetscErrorCode (*f)(Mat, Vec, Vec);
3713: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3715: PetscFunctionBegin;
3716: if (A->factorerrortype) {
3717: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3718: PetscCall(MatSetInf(X));
3719: PetscFunctionReturn(PETSC_SUCCESS);
3720: }
3721: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3722: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3723: PetscCall(MatBoundToCPU(A, &Abound));
3724: if (!Abound) {
3725: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3726: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3727: }
3728: #if PetscDefined(HAVE_CUDA)
3729: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3730: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3731: #elif PetscDefined(HAVE_HIP)
3732: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3733: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3734: #endif
3735: PetscCall(MatGetSize(B, NULL, &N));
3736: for (i = 0; i < N; i++) {
3737: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3738: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3739: PetscCall((*f)(A, b, x));
3740: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3741: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3742: }
3743: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3744: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3745: PetscFunctionReturn(PETSC_SUCCESS);
3746: }
3748: /*@
3749: MatMatSolve - Solves $A X = B$, given a factored matrix.
3751: Neighbor-wise Collective
3753: Input Parameters:
3754: + A - the factored matrix
3754: - B - the right-hand-side matrix, stored as `MATDENSE` (or as sparse `MATAIJ` when using MUMPS)
3757: Output Parameter:
3758: . X - the result matrix (dense matrix)
3760: Level: developer
3762: Note:
3763: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3764: otherwise, `B` and `X` cannot be the same.
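   Example Usage:
   A sketch of solving for several right-hand sides at once (assuming `F` is a factored matrix and `B` is an assembled `MATDENSE` matrix whose row layout is compatible with `F`; each column of `B` is one right-hand side):
.vb
   Mat X;
   MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X);
   MatMatSolve(F, B, X); /* each column of X solves F x = corresponding column of B */
   /* ... use X ... */
   MatDestroy(&X);
.ve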
3766: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3767: @*/
3768: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3769: {
3770: PetscFunctionBegin;
3775: PetscCheckSameComm(A, 1, B, 2);
3776: PetscCheckSameComm(A, 1, X, 3);
3777: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3778: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3779: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3780: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3781: MatCheckPreallocated(A, 1);
3783: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3784: if (!A->ops->matsolve) {
3785: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3786: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3787: } else PetscUseTypeMethod(A, matsolve, B, X);
3788: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3789: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3790: PetscFunctionReturn(PETSC_SUCCESS);
3791: }
3793: /*@
3794: MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.
3796: Neighbor-wise Collective
3798: Input Parameters:
3799: + A - the factored matrix
3800: - B - the right-hand-side matrix (`MATDENSE` matrix)
3802: Output Parameter:
3803: . X - the result matrix (dense matrix)
3805: Level: developer
3807: Note:
3808: The matrices `B` and `X` cannot be the same. I.e., one cannot
3809: call `MatMatSolveTranspose`(A,X,X).
3811: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3812: @*/
3813: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3814: {
3815: PetscFunctionBegin;
3820: PetscCheckSameComm(A, 1, B, 2);
3821: PetscCheckSameComm(A, 1, X, 3);
3822: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3823: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3824: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3825: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3826: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix");
3827: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3828: MatCheckPreallocated(A, 1);
3830: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3831: if (!A->ops->matsolvetranspose) {
3832: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3833: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3834: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3835: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3836: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3837: PetscFunctionReturn(PETSC_SUCCESS);
3838: }
3840: /*@
3841: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3843: Neighbor-wise Collective
3845: Input Parameters:
3846: + A - the factored matrix
3847: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3849: Output Parameter:
3850: . X - the result matrix (dense matrix)
3852: Level: developer
3854: Note:
3855: For MUMPS, the right-hand-side matrix is only supported as a centralized sparse compressed-column matrix on the host process. Since `Bt` is the transpose of the right-hand side, the user must create `Bt` in sparse compressed-row
3856: format on the host process and call `MatMatTransposeSolve()` to obtain the effect of MUMPS' `MatMatSolve()`.
3858: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3859: @*/
3860: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3861: {
3862: PetscFunctionBegin;
3867: PetscCheckSameComm(A, 1, Bt, 2);
3868: PetscCheckSameComm(A, 1, X, 3);
3870: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3871: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3872: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3873: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix has rows");
3874: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3875: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3876: MatCheckPreallocated(A, 1);
3878: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3879: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3880: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3881: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3882: PetscFunctionReturn(PETSC_SUCCESS);
3883: }
3885: /*@
3886: MatForwardSolve - Solves $L x = b$, given a factored matrix, $A = LU$, or
3887: $U^T D^{1/2} x = b$, given a factored symmetric matrix, $A = U^T D U$.
3889: Neighbor-wise Collective
3891: Input Parameters:
3892: + mat - the factored matrix
3893: - b - the right-hand-side vector
3895: Output Parameter:
3896: . x - the result vector
3898: Level: developer
3900: Notes:
3901: `MatSolve()` should be used for most applications, as it performs
3902: a forward solve followed by a backward solve.
3904: The vectors `b` and `x` cannot be the same, i.e., one cannot
3905: call `MatForwardSolve`(A,x,x).
3907: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3908: the diagonal blocks are not implemented as $D = D^{1/2} D^{1/2}$ yet.
3909: `MatForwardSolve()` solves $U^T D y = b$, and
3910: `MatBackwardSolve()` solves $U x = y$.
3911: Thus they do not provide a symmetric preconditioner.
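   Example Usage:
   A sketch of splitting a triangular solve into its two halves (assuming `F` is an LU-factored matrix, for example obtained with `MatLUFactorNumeric()`, and `b`, `y`, `x` are compatibly sized vectors with `y` used as work space):
.vb
   MatForwardSolve(F, b, y);  /* solve L y = b */
   MatBackwardSolve(F, y, x); /* solve U x = y, giving x = A^{-1} b */
.ve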
3913: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3914: @*/
3915: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3916: {
3917: PetscFunctionBegin;
3922: PetscCheckSameComm(mat, 1, b, 2);
3923: PetscCheckSameComm(mat, 1, x, 3);
3924: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3925: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3926: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3927: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3928: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3929: MatCheckPreallocated(mat, 1);
3931: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3932: PetscUseTypeMethod(mat, forwardsolve, b, x);
3933: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3934: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3935: PetscFunctionReturn(PETSC_SUCCESS);
3936: }
3938: /*@
3939: MatBackwardSolve - Solves $U x = b$, given a factored matrix, $A = LU$, or
3940: $D^{1/2} U x = b$, given a factored symmetric matrix, $A = U^T D U$.
3942: Neighbor-wise Collective
3944: Input Parameters:
3945: + mat - the factored matrix
3946: - b - the right-hand-side vector
3948: Output Parameter:
3949: . x - the result vector
3951: Level: developer
3953: Notes:
3954: `MatSolve()` should be used for most applications, as it performs
3955: a forward solve followed by a backward solve.
3957: The vectors `b` and `x` cannot be the same. I.e., one cannot
3958: call `MatBackwardSolve`(A,x,x).
3960: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3961: the diagonal blocks are not implemented as $D = D^{1/2} D^{1/2}$ yet.
3962: `MatForwardSolve()` solves $U^T D y = b$, and
3963: `MatBackwardSolve()` solves $U x = y$.
3964: Thus they do not provide a symmetric preconditioner.
3966: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3967: @*/
3968: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3969: {
3970: PetscFunctionBegin;
3975: PetscCheckSameComm(mat, 1, b, 2);
3976: PetscCheckSameComm(mat, 1, x, 3);
3977: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3978: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3979: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3980: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3981: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3982: MatCheckPreallocated(mat, 1);
3984: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3985: PetscUseTypeMethod(mat, backwardsolve, b, x);
3986: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3987: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3988: PetscFunctionReturn(PETSC_SUCCESS);
3989: }
3991: /*@
3992: MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.
3994: Neighbor-wise Collective
3996: Input Parameters:
3997: + mat - the factored matrix
3998: . b - the right-hand-side vector
3999: - y - the vector to be added to
4001: Output Parameter:
4002: . x - the result vector
4004: Level: developer
4006: Note:
4007: The vectors `b` and `x` cannot be the same. I.e., one cannot
4008: call `MatSolveAdd`(A,x,y,x).
4010: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
4011: @*/
4012: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
4013: {
4014: PetscScalar one = 1.0;
4015: Vec tmp;
4017: PetscFunctionBegin;
4023: PetscCheckSameComm(mat, 1, b, 2);
4024: PetscCheckSameComm(mat, 1, y, 3);
4025: PetscCheckSameComm(mat, 1, x, 4);
4026: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4027: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4028: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4029: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4030: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4031: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4032: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4033: MatCheckPreallocated(mat, 1);
4035: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
4036: PetscCall(VecFlag(x, mat->factorerrortype));
4037: if (mat->factorerrortype) {
4038: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4039: } else if (mat->ops->solveadd) {
4040: PetscUseTypeMethod(mat, solveadd, b, y, x);
4041: } else {
4042: /* do the solve then the add manually */
4043: if (x != y) {
4044: PetscCall(MatSolve(mat, b, x));
4045: PetscCall(VecAXPY(x, one, y));
4046: } else {
4047: PetscCall(VecDuplicate(x, &tmp));
4048: PetscCall(VecCopy(x, tmp));
4049: PetscCall(MatSolve(mat, b, x));
4050: PetscCall(VecAXPY(x, one, tmp));
4051: PetscCall(VecDestroy(&tmp));
4052: }
4053: }
4054: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4055: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4056: PetscFunctionReturn(PETSC_SUCCESS);
4057: }
4059: /*@
4060: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4062: Neighbor-wise Collective
4064: Input Parameters:
4065: + mat - the factored matrix
4066: - b - the right-hand-side vector
4068: Output Parameter:
4069: . x - the result vector
4071: Level: developer
4073: Notes:
4074: The vectors `b` and `x` cannot be the same. I.e., one cannot
4075: call `MatSolveTranspose`(A,x,x).
4077: Most users should employ the `KSP` interface for linear solvers
4078: instead of working directly with matrix algebra routines such as this.
4079: See, e.g., `KSPCreate()`.
4081: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4082: @*/
4083: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4084: {
4085: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4087: PetscFunctionBegin;
4092: PetscCheckSameComm(mat, 1, b, 2);
4093: PetscCheckSameComm(mat, 1, x, 3);
4094: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4095: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4096: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4097: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4098: MatCheckPreallocated(mat, 1);
4099: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4100: PetscCall(VecFlag(x, mat->factorerrortype));
4101: if (mat->factorerrortype) {
4102: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4103: } else {
4104: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4105: PetscCall((*f)(mat, b, x));
4106: }
4107: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4108: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4109: PetscFunctionReturn(PETSC_SUCCESS);
4110: }
4112: /*@
4113: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4114: factored matrix.
4116: Neighbor-wise Collective
4118: Input Parameters:
4119: + mat - the factored matrix
4120: . b - the right-hand-side vector
4121: - y - the vector to be added to
4123: Output Parameter:
4124: . x - the result vector
4126: Level: developer
4128: Note:
4129: The vectors `b` and `x` cannot be the same. I.e., one cannot
4130: call `MatSolveTransposeAdd`(A,x,y,x).
4132: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4133: @*/
4134: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4135: {
4136: PetscScalar one = 1.0;
4137: Vec tmp;
4138: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4140: PetscFunctionBegin;
4146: PetscCheckSameComm(mat, 1, b, 2);
4147: PetscCheckSameComm(mat, 1, y, 3);
4148: PetscCheckSameComm(mat, 1, x, 4);
4149: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4150: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4151: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4152: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4153: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4154: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4155: MatCheckPreallocated(mat, 1);
4157: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4158: PetscCall(VecFlag(x, mat->factorerrortype));
4159: if (mat->factorerrortype) {
4160: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4161: } else if (f) {
4162: PetscCall((*f)(mat, b, y, x));
4163: } else {
4164: /* do the solve then the add manually */
4165: if (x != y) {
4166: PetscCall(MatSolveTranspose(mat, b, x));
4167: PetscCall(VecAXPY(x, one, y));
4168: } else {
4169: PetscCall(VecDuplicate(x, &tmp));
4170: PetscCall(VecCopy(x, tmp));
4171: PetscCall(MatSolveTranspose(mat, b, x));
4172: PetscCall(VecAXPY(x, one, tmp));
4173: PetscCall(VecDestroy(&tmp));
4174: }
4175: }
4176: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4177: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4178: PetscFunctionReturn(PETSC_SUCCESS);
4179: }
4181: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4182: /*@
4183: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4185: Neighbor-wise Collective
4187: Input Parameters:
4188: + mat - the matrix
4189: . b - the right-hand side
4190: . omega - the relaxation factor
4191: . flag - flag indicating the type of SOR (see below)
4192: . shift - diagonal shift
4193: . its - the number of iterations
4194: - lits - the number of local iterations
4196: Output Parameter:
4197: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4199: SOR Flags:
4200: + `SOR_FORWARD_SWEEP` - forward SOR
4201: . `SOR_BACKWARD_SWEEP` - backward SOR
4202: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4203: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4204: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4205: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4206: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4207: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4208: upper/lower triangular part of matrix to
4209: vector (with omega)
4210: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4212: Level: developer
4214: Notes:
4215: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4216: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4217: on each processor.
4219: Application programmers will not generally use `MatSOR()` directly,
4220: but instead will employ the `KSP`/`PC` interface.
4222: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing, otherwise it does a pointwise smoothing
4224: Most users should employ the `KSP` interface for linear solvers
4225: instead of working directly with matrix algebra routines such as this.
4226: See, e.g., `KSPCreate()`.
4228: Vectors `x` and `b` CANNOT be the same
4230: The flags are implemented as bitwise inclusive or operations.
4231: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4232: to specify a zero initial guess for SSOR.
4234: Developer Note:
4235: We should add block SOR support for `MATAIJ` matrices with block size greater than one and no Inodes
4237: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4238: @*/
4239: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4240: {
4241: PetscFunctionBegin;
4246: PetscCheckSameComm(mat, 1, b, 2);
4247: PetscCheckSameComm(mat, 1, x, 8);
4248: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4249: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4250: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4251: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4252: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4253: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4254: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4255: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4257: MatCheckPreallocated(mat, 1);
4258: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4259: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4260: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4261: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4262: PetscFunctionReturn(PETSC_SUCCESS);
4263: }
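/*
   A minimal usage sketch (A, b, x, omega, and the sweep counts are illustrative): two SSOR sweeps
   with a zero initial guess and no diagonal shift, combining the flags with a bitwise or as
   described in the notes above.

     Vec b, x;

     PetscCall(MatCreateVecs(A, &x, &b));
     PetscCall(VecSet(b, 1.0));
     PetscCall(MatSOR(A, b, 1.2, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), 0.0, 2, 1, x));
*/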
4265: /*
4266: Default matrix copy routine.
4267: */
4268: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4269: {
4270: PetscInt i, rstart = 0, rend = 0, nz;
4271: const PetscInt *cwork;
4272: const PetscScalar *vwork;
4274: PetscFunctionBegin;
4275: if (B->assembled) PetscCall(MatZeroEntries(B));
4276: if (str == SAME_NONZERO_PATTERN) {
4277: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4278: for (i = rstart; i < rend; i++) {
4279: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4280: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4281: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4282: }
4283: } else {
4284: PetscCall(MatAYPX(B, 0.0, A, str));
4285: }
4286: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4287: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4288: PetscFunctionReturn(PETSC_SUCCESS);
4289: }
4291: /*@
4292: MatCopy - Copies a matrix to another matrix.
4294: Collective
4296: Input Parameters:
4297: + A - the matrix
4298: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4300: Output Parameter:
4301: . B - where the copy is put
4303: Level: intermediate
4305: Notes:
4306: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4308: `MatCopy()` copies the matrix entries of a matrix to another existing
4309: matrix (after first zeroing the second matrix). A related routine is
4310: `MatConvert()`, which first creates a new matrix and then copies the data.
4312: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4313: @*/
4314: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4315: {
4316: PetscInt i;
4318: PetscFunctionBegin;
4323: PetscCheckSameComm(A, 1, B, 2);
4324: MatCheckPreallocated(B, 2);
4325: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4326: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4327: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4328: A->cmap->N, B->cmap->N);
4329: MatCheckPreallocated(A, 1);
4330: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4332: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4333: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4334: else PetscCall(MatCopy_Basic(A, B, str));
4336: B->stencil.dim = A->stencil.dim;
4337: B->stencil.noc = A->stencil.noc;
4338: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4339: B->stencil.dims[i] = A->stencil.dims[i];
4340: B->stencil.starts[i] = A->stencil.starts[i];
4341: }
4343: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4344: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4345: PetscFunctionReturn(PETSC_SUCCESS);
4346: }
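/*
   A minimal usage sketch (A and B are illustrative): copy the entries of A into an existing matrix B
   that already has the same layout and nonzero pattern, here obtained with MatDuplicate() and
   MAT_DO_NOT_COPY_VALUES.

     Mat B;

     PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
     PetscCall(MatCopy(A, B, SAME_NONZERO_PATTERN));   // B now holds the same entries as A
*/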
4348: /*@
4349: MatConvert - Converts a matrix to another matrix, either of the same
4350: or different type.
4352: Collective
4354: Input Parameters:
4355: + mat - the matrix
4356: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4357: same type as the original matrix.
4358: - reuse - denotes if the destination matrix is to be created or reused.
4359: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input `Mat` to be changed to contain the matrix in the new format), otherwise use
4360: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4362: Output Parameter:
4363: . M - pointer to place new matrix
4365: Level: intermediate
4367: Notes:
4368: `MatConvert()` first creates a new matrix and then copies the data from
4369: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4370: entries of one matrix to another already existing matrix context.
4372: Cannot be used to convert a sequential matrix to a parallel one or a parallel matrix to a sequential one, since
4373: the MPI communicator of the generated matrix is always the same as the communicator
4374: of the input matrix.
4376: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4377: @*/
4378: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4379: {
4380: PetscBool sametype, issame, flg;
4381: PetscBool3 issymmetric, ishermitian;
4382: char convname[256], mtype[256];
4383: Mat B;
4385: PetscFunctionBegin;
4388: PetscAssertPointer(M, 4);
4389: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4390: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4391: MatCheckPreallocated(mat, 1);
4393: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4394: if (flg) newtype = mtype;
4396: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4397: PetscCall(PetscStrcmp(newtype, "same", &issame));
4398: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4399: if (reuse == MAT_REUSE_MATRIX) {
4401: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4402: }
4404: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4405: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4406: PetscFunctionReturn(PETSC_SUCCESS);
4407: }
4409: /* Cache Mat options because some converters use MatHeaderReplace */
4410: issymmetric = mat->symmetric;
4411: ishermitian = mat->hermitian;
4413: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4414: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4415: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4416: } else {
4417: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4418: const char *prefix[3] = {"seq", "mpi", ""};
4419: PetscInt i;
4420: /*
4421: Order of precedence:
4422: 0) See if newtype is a superclass of the current matrix.
4423: 1) See if a specialized converter is known to the current matrix.
4424: 2) See if a specialized converter is known to the desired matrix class.
4425: 3) See if a good general converter is registered for the desired class
4426: (as of 6/27/03 only MATMPIADJ falls into this category).
4427: 4) See if a good general converter is known for the current matrix.
4428: 5) Use a really basic converter.
4429: */
4431: /* 0) See if newtype is a superclass of the current matrix.
4432: i.e mat is mpiaij and newtype is aij */
4433: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4434: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4435: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4436: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4437: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4438: if (flg) {
4439: if (reuse == MAT_INPLACE_MATRIX) {
4440: PetscCall(PetscInfo(mat, "Early return\n"));
4441: PetscFunctionReturn(PETSC_SUCCESS);
4442: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4443: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4444: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4445: PetscFunctionReturn(PETSC_SUCCESS);
4446: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4447: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4448: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4449: PetscFunctionReturn(PETSC_SUCCESS);
4450: }
4451: }
4452: }
4453: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4454: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4455: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4456: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4457: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4458: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4459: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4460: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4461: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4462: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4463: if (conv) goto foundconv;
4464: }
4466: /* 2) See if a specialized converter is known to the desired matrix class. */
4467: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4468: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4469: PetscCall(MatSetType(B, newtype));
4470: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4471: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4472: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4473: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4474: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4475: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4476: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4477: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4478: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4479: if (conv) {
4480: PetscCall(MatDestroy(&B));
4481: goto foundconv;
4482: }
4483: }
4485: /* 3) See if a good general converter is registered for the desired class */
4486: conv = B->ops->convertfrom;
4487: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4488: PetscCall(MatDestroy(&B));
4489: if (conv) goto foundconv;
4491: /* 4) See if a good general converter is known for the current matrix */
4492: if (mat->ops->convert) conv = mat->ops->convert;
4493: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4494: if (conv) goto foundconv;
4496: /* 5) Use a really basic converter. */
4497: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4498: conv = MatConvert_Basic;
4500: foundconv:
4501: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4502: PetscCall((*conv)(mat, newtype, reuse, M));
4503: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4504: /* the block sizes must be same if the mappings are copied over */
4505: (*M)->rmap->bs = mat->rmap->bs;
4506: (*M)->cmap->bs = mat->cmap->bs;
4507: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4508: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4509: (*M)->rmap->mapping = mat->rmap->mapping;
4510: (*M)->cmap->mapping = mat->cmap->mapping;
4511: }
4512: (*M)->stencil.dim = mat->stencil.dim;
4513: (*M)->stencil.noc = mat->stencil.noc;
4514: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4515: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4516: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4517: }
4518: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4519: }
4520: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4522: /* Copy Mat options */
4523: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4524: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4525: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4526: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4527: PetscFunctionReturn(PETSC_SUCCESS);
4528: }
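/*
   A minimal usage sketch (A, Adense, and the target type are illustrative): convert a matrix to the
   dense format once, then refresh the converted copy after A changes by reusing the result.

     Mat Adense;

     PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense));
     // ... modify the entries of A ...
     PetscCall(MatConvert(A, MATDENSE, MAT_REUSE_MATRIX, &Adense));
*/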
4530: /*@
4531: MatFactorGetSolverType - Returns the name of the package providing the factorization routines
4533: Not Collective
4535: Input Parameter:
4536: . mat - the matrix, must be a factored matrix
4538: Output Parameter:
4539: . type - the string name of the package (do not free this string)
4541: Level: intermediate
4543: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4544: @*/
4545: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4546: {
4547: PetscErrorCode (*conv)(Mat, MatSolverType *);
4549: PetscFunctionBegin;
4552: PetscAssertPointer(type, 2);
4553: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4554: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4555: if (conv) PetscCall((*conv)(mat, type));
4556: else *type = MATSOLVERPETSC;
4557: PetscFunctionReturn(PETSC_SUCCESS);
4558: }
4560: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4561: struct _MatSolverTypeForSpecifcType {
4562: MatType mtype;
4563: /* no entry for MAT_FACTOR_NONE */
4564: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4565: MatSolverTypeForSpecifcType next;
4566: };
4568: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4569: struct _MatSolverTypeHolder {
4570: char *name;
4571: MatSolverTypeForSpecifcType handlers;
4572: MatSolverTypeHolder next;
4573: };
4575: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4577: /*@C
4578: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4580: Logically Collective, No Fortran Support
4582: Input Parameters:
4583: + package - name of the package, for example `petsc` or `superlu`
4584: . mtype - the matrix type that works with this package
4585: . ftype - the type of factorization supported by the package
4586: - createfactor - routine that will create the factored matrix ready to be used
4588: Level: developer
4590: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4591: `MatGetFactor()`
4592: @*/
4593: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4594: {
4595: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4596: PetscBool flg;
4597: MatSolverTypeForSpecifcType inext, iprev = NULL;
4599: PetscFunctionBegin;
4600: PetscCall(MatInitializePackage());
4601: if (!next) {
4602: PetscCall(PetscNew(&MatSolverTypeHolders));
4603: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4604: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4605: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4606: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4607: PetscFunctionReturn(PETSC_SUCCESS);
4608: }
4609: while (next) {
4610: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4611: if (flg) {
4612: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4613: inext = next->handlers;
4614: while (inext) {
4615: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4616: if (flg) {
4617: inext->createfactor[(int)ftype - 1] = createfactor;
4618: PetscFunctionReturn(PETSC_SUCCESS);
4619: }
4620: iprev = inext;
4621: inext = inext->next;
4622: }
4623: PetscCall(PetscNew(&iprev->next));
4624: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4625: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4626: PetscFunctionReturn(PETSC_SUCCESS);
4627: }
4628: prev = next;
4629: next = next->next;
4630: }
4631: PetscCall(PetscNew(&prev->next));
4632: PetscCall(PetscStrallocpy(package, &prev->next->name));
4633: PetscCall(PetscNew(&prev->next->handlers));
4634: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4635: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4636: PetscFunctionReturn(PETSC_SUCCESS);
4637: }
4639: /*@C
4640: MatSolverTypeGet - Gets the function that creates the factor matrix if it exists
4642: Input Parameters:
4643: + type - name of the package, for example `petsc` or `superlu`; if this is `NULL`, then the first result that satisfies the other criteria is returned
4644: . mtype - the matrix type that works with this type
4645: - ftype - the type of factorization supported by the type
4647: Output Parameters:
4648: + foundtype - `PETSC_TRUE` if the type was registered
4649: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4650: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4652: Calling sequence of `createfactor`:
4653: + A - the matrix providing the factor matrix
4654: . ftype - the `MatFactorType` of the factor requested
4655: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4657: Level: developer
4659: Note:
4660: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4661: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4662: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4664: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4665: `MatInitializePackage()`
4666: @*/
4667: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
4668: {
4669: MatSolverTypeHolder next = MatSolverTypeHolders;
4670: PetscBool flg;
4671: MatSolverTypeForSpecifcType inext;
4673: PetscFunctionBegin;
4674: if (foundtype) *foundtype = PETSC_FALSE;
4675: if (foundmtype) *foundmtype = PETSC_FALSE;
4676: if (createfactor) *createfactor = NULL;
4678: if (type) {
4679: while (next) {
4680: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4681: if (flg) {
4682: if (foundtype) *foundtype = PETSC_TRUE;
4683: inext = next->handlers;
4684: while (inext) {
4685: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4686: if (flg) {
4687: if (foundmtype) *foundmtype = PETSC_TRUE;
4688: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4689: PetscFunctionReturn(PETSC_SUCCESS);
4690: }
4691: inext = inext->next;
4692: }
4693: }
4694: next = next->next;
4695: }
4696: } else {
4697: while (next) {
4698: inext = next->handlers;
4699: while (inext) {
4700: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4701: if (flg && inext->createfactor[(int)ftype - 1]) {
4702: if (foundtype) *foundtype = PETSC_TRUE;
4703: if (foundmtype) *foundmtype = PETSC_TRUE;
4704: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4705: PetscFunctionReturn(PETSC_SUCCESS);
4706: }
4707: inext = inext->next;
4708: }
4709: next = next->next;
4710: }
4711: /* try with base classes inext->mtype */
4712: next = MatSolverTypeHolders;
4713: while (next) {
4714: inext = next->handlers;
4715: while (inext) {
4716: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4717: if (flg && inext->createfactor[(int)ftype - 1]) {
4718: if (foundtype) *foundtype = PETSC_TRUE;
4719: if (foundmtype) *foundmtype = PETSC_TRUE;
4720: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4721: PetscFunctionReturn(PETSC_SUCCESS);
4722: }
4723: inext = inext->next;
4724: }
4725: next = next->next;
4726: }
4727: }
4728: PetscFunctionReturn(PETSC_SUCCESS);
4729: }
4731: PetscErrorCode MatSolverTypeDestroy(void)
4732: {
4733: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4734: MatSolverTypeForSpecifcType inext, iprev;
4736: PetscFunctionBegin;
4737: while (next) {
4738: PetscCall(PetscFree(next->name));
4739: inext = next->handlers;
4740: while (inext) {
4741: PetscCall(PetscFree(inext->mtype));
4742: iprev = inext;
4743: inext = inext->next;
4744: PetscCall(PetscFree(iprev));
4745: }
4746: prev = next;
4747: next = next->next;
4748: PetscCall(PetscFree(prev));
4749: }
4750: MatSolverTypeHolders = NULL;
4751: PetscFunctionReturn(PETSC_SUCCESS);
4752: }
4754: /*@
4755: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4757: Logically Collective
4759: Input Parameter:
4760: . mat - the matrix
4762: Output Parameter:
4763: . flg - `PETSC_TRUE` if uses the ordering
4765: Level: developer
4767: Note:
4768: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4769: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4771: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4772: @*/
4773: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4774: {
4775: PetscFunctionBegin;
4776: *flg = mat->canuseordering;
4777: PetscFunctionReturn(PETSC_SUCCESS);
4778: }
4780: /*@
4781: MatFactorGetPreferredOrdering - Returns the preferred ordering for a particular matrix factor object
4783: Logically Collective
4785: Input Parameters:
4786: + mat - the matrix obtained with `MatGetFactor()`
4787: - ftype - the factorization type to be used
4789: Output Parameter:
4790: . otype - the preferred ordering type
4792: Level: developer
4794: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4795: @*/
4796: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4797: {
4798: PetscFunctionBegin;
4799: *otype = mat->preferredordering[ftype];
4800: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4801: PetscFunctionReturn(PETSC_SUCCESS);
4802: }
4804: /*@
4805: MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic,Numeric()
4807: Collective
4809: Input Parameters:
4810: + mat - the matrix
4811: . type - name of solver type, for example, `superlu`, `petsc` (to use PETSc's solver if it is available); if this is `NULL`, then the first result that satisfies
4812: the other criteria is returned
4813: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4815: Output Parameter:
4816: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4818: Options Database Keys:
4819: + -pc_factor_mat_solver_type <type> - choose the type at run time. When using `KSP` solvers
4820: . -pc_factor_mat_factor_on_host <bool> - do mat factorization on host (with device matrices). Default is doing it on device
4821: - -pc_factor_mat_solve_on_host <bool> - do mat solve on host (with device matrices). Default is doing it on device
4823: Level: intermediate
4825: Notes:
4826: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4827: types registered with `MatSolverTypeRegister()` cannot be fully tested if not at runtime.
4829: Users usually access the factorization solvers via `KSP`
4831: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4832: such as pastix, superlu, mumps etc. PETSc must have been configured (./configure) to use the external solver, using the option --download-package or --with-package-dir
4834: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4835: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4836: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4838: Some of the packages have options for controlling the factorization, these are in the form -prefix_mat_packagename_packageoption
4839: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly one can
4840: call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4842: Developer Note:
4843: This should actually be called `MatCreateFactor()` since it creates a new factor object
4845: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4846: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`
4847: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4848: @*/
4849: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4850: {
4851: PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
4852: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4854: PetscFunctionBegin;
4858: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4859: MatCheckPreallocated(mat, 1);
4861: PetscCall(MatIsShell(mat, &shell));
4862: if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
4863: if (hasop) {
4864: PetscUseTypeMethod(mat, getfactor, type, ftype, f);
4865: PetscFunctionReturn(PETSC_SUCCESS);
4866: }
4868: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4869: if (!foundtype) {
4870: if (type) {
4871: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4872: ((PetscObject)mat)->type_name, type);
4873: } else {
4874: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4875: }
4876: }
4877: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4878: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4880: PetscCall((*conv)(mat, ftype, f));
4881: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4882: PetscFunctionReturn(PETSC_SUCCESS);
4883: }
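/*
   A minimal usage sketch (A, F, b, x, row, col, and info are illustrative) of the direct-solve
   workflow the notes above describe; most users would let KSP/PC drive these calls instead.

     Mat           F;
     IS            row, col;
     MatFactorInfo info;
     Vec           b, x;

     PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
     PetscCall(MatGetOrdering(A, MATORDERINGND, &row, &col));
     PetscCall(MatFactorInfoInitialize(&info));
     PetscCall(MatLUFactorSymbolic(F, A, row, col, &info));
     PetscCall(MatLUFactorNumeric(F, A, &info));
     PetscCall(MatCreateVecs(A, &x, &b));
     PetscCall(VecSet(b, 1.0));
     PetscCall(MatSolve(F, b, x));   // x = A^{-1} b
*/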
4885: /*@
4886: MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver type and factor type
4888: Not Collective
4890: Input Parameters:
4891: + mat - the matrix
4892: . type - name of solver type, for example, `superlu`, `petsc` (to use PETSc's default)
4893: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4895: Output Parameter:
4896: . flg - `PETSC_TRUE` if the factorization is available
4898: Level: intermediate
4900: Notes:
4901: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4902: such as pastix, superlu, mumps etc.
4904: PETSc must have been configured (./configure) to use the external solver, using the option --download-package
4906: Developer Note:
4907: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4909: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4910: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4911: @*/
4912: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4913: {
4914: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4916: PetscFunctionBegin;
4918: PetscAssertPointer(flg, 4);
4920: *flg = PETSC_FALSE;
4921: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
4923: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4924: MatCheckPreallocated(mat, 1);
4926: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4927: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4928: PetscFunctionReturn(PETSC_SUCCESS);
4929: }
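/*
   A minimal usage sketch (A is illustrative, and superlu is just an example external package):
   test whether an external LU factorization is available before requesting it, falling back to
   PETSc's own solver otherwise.

     PetscBool flg;
     Mat       F;

     PetscCall(MatGetFactorAvailable(A, MATSOLVERSUPERLU, MAT_FACTOR_LU, &flg));
     PetscCall(MatGetFactor(A, flg ? MATSOLVERSUPERLU : MATSOLVERPETSC, MAT_FACTOR_LU, &F));
*/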
4931: /*@
4932: MatDuplicate - Duplicates a matrix including the non-zero structure.
4934: Collective
4936: Input Parameters:
4937: + mat - the matrix
4938: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4939: See the manual page for `MatDuplicateOption()` for an explanation of these options.
4941: Output Parameter:
4942: . M - pointer to place new matrix
4944: Level: intermediate
4946: Notes:
4947: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
4949: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
4951: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4953: When the original matrix is the product of a matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4954: is duplicated and the internal data structures created for the reuse of previous matrix operations are not duplicated.
4955: Users should not use `MatDuplicate()` to create a new matrix `M` if `M` is intended to be reused as the product of a matrix operation.
4957: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4958: @*/
4959: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4960: {
4961: Mat B;
4962: VecType vtype;
4963: PetscInt i;
4964: PetscObject dm, container_h, container_d;
4965: void (*viewf)(void);
4967: PetscFunctionBegin;
4970: PetscAssertPointer(M, 3);
4971: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4972: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4973: MatCheckPreallocated(mat, 1);
4975: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4976: PetscUseTypeMethod(mat, duplicate, op, M);
4977: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4978: B = *M;
4980: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4981: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4982: PetscCall(MatGetVecType(mat, &vtype));
4983: PetscCall(MatSetVecType(B, vtype));
4985: B->stencil.dim = mat->stencil.dim;
4986: B->stencil.noc = mat->stencil.noc;
4987: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4988: B->stencil.dims[i] = mat->stencil.dims[i];
4989: B->stencil.starts[i] = mat->stencil.starts[i];
4990: }
4992: B->nooffproczerorows = mat->nooffproczerorows;
4993: B->nooffprocentries = mat->nooffprocentries;
4995: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
4996: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
4997: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
4998: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
4999: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
5000: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
5001: if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
5002: PetscCall(PetscObjectStateIncrease((PetscObject)B));
5003: PetscFunctionReturn(PETSC_SUCCESS);
5004: }
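/*
   A minimal usage sketch (A and B are illustrative): create a scratch copy of an assembled matrix,
   values included, and destroy it when finished.

     Mat B;

     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &B));
     // ... use B freely, e.g. overwrite its entries ...
     PetscCall(MatDestroy(&B));
*/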
5006: /*@
5007: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
5009: Logically Collective
5011: Input Parameter:
5012: . mat - the matrix
5014: Output Parameter:
5015: . v - the diagonal of the matrix
5017: Level: intermediate
5019: Note:
5020: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
5021: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
5022: is larger than `ndiag`, the values of the remaining entries are unspecified.
5024: Currently only correct in parallel for square matrices.
5026: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
5027: @*/
5028: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
5029: {
5030: PetscFunctionBegin;
5034: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5035: MatCheckPreallocated(mat, 1);
5036: if (PetscDefined(USE_DEBUG)) {
5037: PetscInt nv, row, col, ndiag;
5039: PetscCall(VecGetLocalSize(v, &nv));
5040: PetscCall(MatGetLocalSize(mat, &row, &col));
5041: ndiag = PetscMin(row, col);
5042: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5043: }
5045: PetscUseTypeMethod(mat, getdiagonal, v);
5046: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5047: PetscFunctionReturn(PETSC_SUCCESS);
5048: }
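/*
   A minimal usage sketch (A and diag are illustrative): extract the diagonal into a vector with a
   conforming layout created by MatCreateVecs().

     Vec diag;

     PetscCall(MatCreateVecs(A, NULL, &diag));   // diag conforms to the rows of A
     PetscCall(MatGetDiagonal(A, diag));
     PetscCall(VecDestroy(&diag));
*/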
5050: /*@
5051: MatGetRowMin - Gets the minimum value (of the real part) of each
5052: row of the matrix
5054: Logically Collective
5056: Input Parameter:
5057: . mat - the matrix
5059: Output Parameters:
5060: + v - the vector for storing the minimums
5061: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5063: Level: intermediate
5065: Note:
5066: The result of this call is the same as if one converted the matrix to dense format
5067: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5069: This code is only implemented for a couple of matrix formats.
5071: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5072: `MatGetRowMax()`
5073: @*/
5074: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5075: {
5076: PetscFunctionBegin;
5080: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5082: if (!mat->cmap->N) {
5083: PetscCall(VecSet(v, PETSC_MAX_REAL));
5084: if (idx) {
5085: PetscInt i, m = mat->rmap->n;
5086: for (i = 0; i < m; i++) idx[i] = -1;
5087: }
5088: } else {
5089: MatCheckPreallocated(mat, 1);
5090: }
5091: PetscUseTypeMethod(mat, getrowmin, v, idx);
5092: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5093: PetscFunctionReturn(PETSC_SUCCESS);
5094: }
5096: /*@
5097: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5098: row of the matrix
5100: Logically Collective
5102: Input Parameter:
5103: . mat - the matrix
5105: Output Parameters:
5106: + v - the vector for storing the minimums
5107: - idx - the indices of the column found for each row (or `NULL` if not needed)
5109: Level: intermediate
5111: Notes:
5112: If a row is completely empty or has only 0.0 values, then the `idx` value for that
5113: row is 0 (the first column).
5115: This code is only implemented for a couple of matrix formats.
5117: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5118: @*/
5119: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5120: {
5121: PetscFunctionBegin;
5125: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5126: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5128: if (!mat->cmap->N) {
5129: PetscCall(VecSet(v, 0.0));
5130: if (idx) {
5131: PetscInt i, m = mat->rmap->n;
5132: for (i = 0; i < m; i++) idx[i] = -1;
5133: }
5134: } else {
5135: MatCheckPreallocated(mat, 1);
5136: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5137: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5138: }
5139: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5140: PetscFunctionReturn(PETSC_SUCCESS);
5141: }
5143: /*@
5144: MatGetRowMax - Gets the maximum value (of the real part) of each
5145: row of the matrix
5147: Logically Collective
5149: Input Parameter:
5150: . mat - the matrix
5152: Output Parameters:
5153: + v - the vector for storing the maximums
5154: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5156: Level: intermediate
5158: Notes:
5159: The result of this call is the same as if one converted the matrix to dense format
5160: and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
5162: This code is only implemented for a couple of matrix formats.
5164: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5165: @*/
5166: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5167: {
5168: PetscFunctionBegin;
5172: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5174: if (!mat->cmap->N) {
5175: PetscCall(VecSet(v, PETSC_MIN_REAL));
5176: if (idx) {
5177: PetscInt i, m = mat->rmap->n;
5178: for (i = 0; i < m; i++) idx[i] = -1;
5179: }
5180: } else {
5181: MatCheckPreallocated(mat, 1);
5182: PetscUseTypeMethod(mat, getrowmax, v, idx);
5183: }
5184: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5185: PetscFunctionReturn(PETSC_SUCCESS);
5186: }
5188: /*@
5189: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5190: row of the matrix
5192: Logically Collective
5194: Input Parameter:
5195: . mat - the matrix
5197: Output Parameters:
5198: + v - the vector for storing the maximums
5199: - idx - the indices of the column found for each row (or `NULL` if not needed)
5201: Level: intermediate
5203: Notes:
5204: If a row is completely empty or has only 0.0 values, then the `idx` value for that
5205: row is 0 (the first column).
5207: This code is only implemented for a couple of matrix formats.
5209: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5210: @*/
5211: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5212: {
5213: PetscFunctionBegin;
5217: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5219: if (!mat->cmap->N) {
5220: PetscCall(VecSet(v, 0.0));
5221: if (idx) {
5222: PetscInt i, m = mat->rmap->n;
5223: for (i = 0; i < m; i++) idx[i] = -1;
5224: }
5225: } else {
5226: MatCheckPreallocated(mat, 1);
5227: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5228: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5229: }
5230: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5231: PetscFunctionReturn(PETSC_SUCCESS);
5232: }
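/*
   A minimal usage sketch (A, rowmax, and cols are illustrative): get the largest entry in absolute
   value of each local row, together with the column index where it was found.

     Vec      rowmax;
     PetscInt m, *cols;

     PetscCall(MatGetLocalSize(A, &m, NULL));
     PetscCall(PetscMalloc1(m, &cols));
     PetscCall(MatCreateVecs(A, NULL, &rowmax));   // one entry per local row
     PetscCall(MatGetRowMaxAbs(A, rowmax, cols));
     PetscCall(PetscFree(cols));
     PetscCall(VecDestroy(&rowmax));
*/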
5234: /*@
5235: MatGetRowSumAbs - Gets the sum value (in absolute value) of each row of the matrix
5237: Logically Collective
5239: Input Parameter:
5240: . mat - the matrix
5242: Output Parameter:
5243: . v - the vector for storing the sum
5245: Level: intermediate
5247: This code is only implemented for a couple of matrix formats.
5249: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5250: @*/
5251: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5252: {
5253: PetscFunctionBegin;
5257: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5259: if (!mat->cmap->N) {
5260: PetscCall(VecSet(v, 0.0));
5261: } else {
5262: MatCheckPreallocated(mat, 1);
5263: PetscUseTypeMethod(mat, getrowsumabs, v);
5264: }
5265: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5266: PetscFunctionReturn(PETSC_SUCCESS);
5267: }
5269: /*@
5270: MatGetRowSum - Gets the sum of each row of the matrix
5272: Logically or Neighborhood Collective
5274: Input Parameter:
5275: . mat - the matrix
5277: Output Parameter:
5278: . v - the vector for storing the sum of rows
5280: Level: intermediate
5282: Note:
5283: This code is slow since it is not currently specialized for different formats
5285: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5286: @*/
5287: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5288: {
5289: Vec ones;
5291: PetscFunctionBegin;
5295: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5296: MatCheckPreallocated(mat, 1);
5297: PetscCall(MatCreateVecs(mat, &ones, NULL));
5298: PetscCall(VecSet(ones, 1.));
5299: PetscCall(MatMult(mat, ones, v));
5300: PetscCall(VecDestroy(&ones));
5301: PetscFunctionReturn(PETSC_SUCCESS);
5302: }
5304: /*@
5305: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5306: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5308: Collective
5310: Input Parameter:
5311: . mat - the matrix to provide the transpose
5313: Output Parameter:
5314: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5316: Level: advanced
5318: Note:
5319: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5320: routine allows bypassing that call.
5322: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5323: @*/
5324: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5325: {
5326: MatParentState *rb = NULL;
5328: PetscFunctionBegin;
5329: PetscCall(PetscNew(&rb));
5330: rb->id = ((PetscObject)mat)->id;
5331: rb->state = 0;
5332: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5333: PetscCall(PetscObjectContainerCompose((PetscObject)B, "MatTransposeParent", rb, PetscCtxDestroyDefault));
5334: PetscFunctionReturn(PETSC_SUCCESS);
5335: }
5337: /*@
5338: MatTranspose - Computes the transpose of a matrix, either in-place or out-of-place.
5340: Collective
5342: Input Parameters:
5343: + mat - the matrix to transpose
5344: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5346: Output Parameter:
5347: . B - the transpose of the matrix
5349: Level: intermediate
5351: Notes:
5352: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5354: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX` to store the transpose. If you already have a matrix to contain the
5355: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5357: If the nonzero structure of `mat` changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5359: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose but don't need the storage to be changed.
5360: For example, the result of `MatCreateTranspose()` will compute the transpose of the given matrix times a vector for matrix-vector products computed with `MatMult()`.
5362: If `mat` is unchanged from the last call this function returns immediately without recomputing the result
5364: If you only need the symbolic transpose of a matrix, and not the numerical values, use `MatTransposeSymbolic()`
5366: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5367: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5368: @*/
5369: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5370: {
5371: PetscContainer rB = NULL;
5372: MatParentState *rb = NULL;
5374: PetscFunctionBegin;
5377: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5378: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5379: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5380: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5381: MatCheckPreallocated(mat, 1);
5382: if (reuse == MAT_REUSE_MATRIX) {
5383: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5384: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5385: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5386: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5387: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5388: }
5390: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5391: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5392: PetscUseTypeMethod(mat, transpose, reuse, B);
5393: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5394: }
5395: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5397: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5398: if (reuse != MAT_INPLACE_MATRIX) {
5399: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5400: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5401: rb->state = ((PetscObject)mat)->state;
5402: rb->nonzerostate = mat->nonzerostate;
5403: }
5404: PetscFunctionReturn(PETSC_SUCCESS);
5405: }
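/*
   A minimal usage sketch (A and At are illustrative): build the transpose once, then refresh its
   numerical values after A changes (same nonzero structure) by reusing the result.

     Mat At;

     PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &At));
     // ... change the numerical values of A, keeping its nonzero structure ...
     PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &At));
*/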
5407: /*@
5408: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5410: Collective
5412: Input Parameter:
5413: . A - the matrix to transpose
5415: Output Parameter:
5416: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5417: numerical portion.
5419: Level: intermediate
5421: Note:
5422: This is not supported for many matrix types, use `MatTranspose()` in those cases
5424: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5425: @*/
5426: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5427: {
5428: PetscFunctionBegin;
5431: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5432: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5433: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5434: PetscUseTypeMethod(A, transposesymbolic, B);
5435: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5437: PetscCall(MatTransposeSetPrecursor(A, *B));
5438: PetscFunctionReturn(PETSC_SUCCESS);
5439: }
5441: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5442: {
5443: PetscContainer rB;
5444: MatParentState *rb;
5446: PetscFunctionBegin;
5449: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5450: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5451: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5452: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5453: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5454: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5455: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5456: PetscFunctionReturn(PETSC_SUCCESS);
5457: }
5459: /*@
5460: MatIsTranspose - Test whether a matrix is another one's transpose,
5461: or its own, in which case it tests symmetry.
5463: Collective
5465: Input Parameters:
5466: + A - the matrix to test
5467: . B - the matrix to test against, this can equal the first parameter
5468: - tol - tolerance, differences between entries smaller than this are counted as zero
5470: Output Parameter:
5471: . flg - the result
5473: Level: intermediate
5475: Notes:
5476: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5477: test involves parallel copies of the block off-diagonal parts of the matrix.
5479: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5480: @*/
5481: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5482: {
5483: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5485: PetscFunctionBegin;
5488: PetscAssertPointer(flg, 4);
5489: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5490: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5491: *flg = PETSC_FALSE;
5492: if (f && g) {
5493: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5494: PetscCall((*f)(A, B, tol, flg));
5495: } else {
5496: MatType mattype;
5498: PetscCall(MatGetType(f ? B : A, &mattype));
5499: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5500: }
5501: PetscFunctionReturn(PETSC_SUCCESS);
5502: }
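/*
   Illustrative sketch (not part of matrix.c): checking a transpose relation and testing symmetry.
   Assumes A is an assembled matrix of a type that provides MatIsTranspose_C (e.g. MATAIJ).

     Mat       A, B;
     PetscBool flg;

     PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &B));
     PetscCall(MatIsTranspose(A, B, 1.e-12, &flg));   // flg should come back PETSC_TRUE
     PetscCall(MatIsTranspose(A, A, 0.0, &flg));      // passing the same matrix twice tests symmetry
     PetscCall(MatDestroy(&B));
*/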
5504: /*@
5505:   MatHermitianTranspose - Computes the Hermitian (complex conjugate) transpose of a matrix, either in-place or out-of-place.
5507: Collective
5509: Input Parameters:
5510: + mat - the matrix to transpose and complex conjugate
5511: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5513: Output Parameter:
5514: . B - the Hermitian transpose
5516: Level: intermediate
5518: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5519: @*/
5520: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5521: {
5522: PetscFunctionBegin;
5523: PetscCall(MatTranspose(mat, reuse, B));
5524: #if defined(PETSC_USE_COMPLEX)
5525: PetscCall(MatConjugate(*B));
5526: #endif
5527: PetscFunctionReturn(PETSC_SUCCESS);
5528: }
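/*
   Illustrative sketch (not part of matrix.c): forming the Hermitian transpose out-of-place.
   Assumes A is an assembled matrix; with real scalars this is identical to MatTranspose().

     Mat A, AH;

     PetscCall(MatHermitianTranspose(A, MAT_INITIAL_MATRIX, &AH));   // AH = conj(A)^T
     // ... use AH ...
     PetscCall(MatDestroy(&AH));
*/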
5530: /*@
5531:   MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose, or its own, in which case it tests whether the matrix is Hermitian.
5533: Collective
5535: Input Parameters:
5536: + A - the matrix to test
5537: . B - the matrix to test against, this can equal the first parameter
5538: - tol - tolerance, differences between entries smaller than this are counted as zero
5540: Output Parameter:
5541: . flg - the result
5543: Level: intermediate
5545: Notes:
5546: Only available for `MATAIJ` matrices.
5548: The sequential algorithm
5549: has a running time of the order of the number of nonzeros; the parallel
5550: test involves parallel copies of the block off-diagonal parts of the matrix.
5552: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5553: @*/
5554: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5555: {
5556: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5558: PetscFunctionBegin;
5561: PetscAssertPointer(flg, 4);
5562: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5563: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5564: if (f && g) {
5565:     PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5566: PetscCall((*f)(A, B, tol, flg));
5567: }
5568: PetscFunctionReturn(PETSC_SUCCESS);
5569: }
5571: /*@
5572: MatPermute - Creates a new matrix with rows and columns permuted from the
5573: original.
5575: Collective
5577: Input Parameters:
5578: + mat - the matrix to permute
5579: . row - row permutation, each processor supplies only the permutation for its rows
5580: - col - column permutation, each processor supplies only the permutation for its columns
5582: Output Parameter:
5583: . B - the permuted matrix
5585: Level: advanced
5587: Note:
5588: The index sets map from row/col of permuted matrix to row/col of original matrix.
5589: The index sets should be on the same communicator as mat and have the same local sizes.
5591: Developer Note:
5592: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5593: exploit the fact that row and col are permutations, consider implementing the
5594: more general `MatCreateSubMatrix()` instead.
5596: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5597: @*/
5598: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5599: {
5600: PetscFunctionBegin;
5605: PetscAssertPointer(B, 4);
5606: PetscCheckSameComm(mat, 1, row, 2);
5607: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5608: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5609: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5610: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5611: MatCheckPreallocated(mat, 1);
5613: if (mat->ops->permute) {
5614: PetscUseTypeMethod(mat, permute, row, col, B);
5615: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5616: } else {
5617: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5618: }
5619: PetscFunctionReturn(PETSC_SUCCESS);
5620: }
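/*
   Illustrative sketch (not part of matrix.c): symmetric permutation of a matrix using an RCM ordering.
   Assumes A is an assembled square matrix.

     Mat A, Aperm;
     IS  rperm, cperm;

     PetscCall(MatGetOrdering(A, MATORDERINGRCM, &rperm, &cperm));
     PetscCall(MatPermute(A, rperm, cperm, &Aperm));
     PetscCall(ISDestroy(&rperm));
     PetscCall(ISDestroy(&cperm));
     // ... use Aperm, e.g. to reduce fill in a factorization ...
     PetscCall(MatDestroy(&Aperm));
*/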
5622: /*@
5623: MatEqual - Compares two matrices.
5625: Collective
5627: Input Parameters:
5628: + A - the first matrix
5629: - B - the second matrix
5631: Output Parameter:
5632: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5634: Level: intermediate
5636: Note:
5637:   If either of the matrices is "matrix-free", meaning the matrix entries are not stored explicitly, then equality is determined by comparing
5638:   the results of several matrix-vector products using randomly created vectors; see `MatMultEqual()`.
5640: .seealso: [](ch_matrices), `Mat`, `MatMultEqual()`
5641: @*/
5642: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5643: {
5644: PetscFunctionBegin;
5649: PetscAssertPointer(flg, 3);
5650: PetscCheckSameComm(A, 1, B, 2);
5651: MatCheckPreallocated(A, 1);
5652: MatCheckPreallocated(B, 2);
5653: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5654: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5655: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5656: B->cmap->N);
5657: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5658: PetscUseTypeMethod(A, equal, B, flg);
5659: } else {
5660: PetscCall(MatMultEqual(A, B, 10, flg));
5661: }
5662: PetscFunctionReturn(PETSC_SUCCESS);
5663: }
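/*
   Illustrative sketch (not part of matrix.c): comparing two matrices.
   Assumes A and B are assembled matrices on the same communicator with matching global sizes.

     PetscBool eq;

     PetscCall(MatEqual(A, B, &eq));
     if (eq) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "A and B are equal\n"));
*/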
5665: /*@
5666: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5667: matrices that are stored as vectors. Either of the two scaling
5668: matrices can be `NULL`.
5670: Collective
5672: Input Parameters:
5673: + mat - the matrix to be scaled
5674: . l - the left scaling vector (or `NULL`)
5675: - r - the right scaling vector (or `NULL`)
5677: Level: intermediate
5679: Note:
5680:   `MatDiagonalScale()` computes $A = LAR$, where
5681:   $L$ and $R$ are diagonal matrices that are stored as vectors.
5682:   $L$ scales the rows of the matrix and $R$ scales the columns.
5684: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5685: @*/
5686: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5687: {
5688: PetscFunctionBegin;
5691: if (l) {
5693: PetscCheckSameComm(mat, 1, l, 2);
5694: }
5695: if (r) {
5697: PetscCheckSameComm(mat, 1, r, 3);
5698: }
5699: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5700: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5701: MatCheckPreallocated(mat, 1);
5702: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5704: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5705: PetscUseTypeMethod(mat, diagonalscale, l, r);
5706: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5707: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5708: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5709: PetscFunctionReturn(PETSC_SUCCESS);
5710: }
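/*
   Illustrative sketch (not part of matrix.c): symmetric diagonal scaling of a matrix.
   Assumes A is an assembled square matrix and d is a vector, built elsewhere, holding 1/sqrt(|A_ii|).

     Mat A;
     Vec d;

     PetscCall(MatDiagonalScale(A, d, d));   // A <- diag(d) A diag(d)
     // Scale only the rows with MatDiagonalScale(A, d, NULL), or only the columns with MatDiagonalScale(A, NULL, d)
*/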
5712: /*@
5713: MatScale - Scales all elements of a matrix by a given number.
5715: Logically Collective
5717: Input Parameters:
5718: + mat - the matrix to be scaled
5719: - a - the scaling value
5721: Level: intermediate
5723: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5724: @*/
5725: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5726: {
5727: PetscFunctionBegin;
5730: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5731: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5733: MatCheckPreallocated(mat, 1);
5735: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5736: if (a != (PetscScalar)1.0) {
5737: PetscUseTypeMethod(mat, scale, a);
5738: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5739: }
5740: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5741: PetscFunctionReturn(PETSC_SUCCESS);
5742: }
5744: /*@
5745: MatNorm - Calculates various norms of a matrix.
5747: Collective
5749: Input Parameters:
5750: + mat - the matrix
5751: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5753: Output Parameter:
5754: . nrm - the resulting norm
5756: Level: intermediate
5758: .seealso: [](ch_matrices), `Mat`
5759: @*/
5760: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5761: {
5762: PetscFunctionBegin;
5765: PetscAssertPointer(nrm, 3);
5767: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5768: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5769: MatCheckPreallocated(mat, 1);
5771: PetscUseTypeMethod(mat, norm, type, nrm);
5772: PetscFunctionReturn(PETSC_SUCCESS);
5773: }
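/*
   Illustrative sketch (not part of matrix.c): normalizing a matrix by its Frobenius norm.
   Assumes A is an assembled matrix with a nonzero norm.

     PetscReal nrm;

     PetscCall(MatNorm(A, NORM_FROBENIUS, &nrm));
     PetscCall(MatScale(A, 1.0 / nrm));   // every entry of A is scaled by 1/||A||_F
*/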
5775: /*
5776: This variable is used to prevent counting of MatAssemblyBegin() that
5777: are called from within a MatAssemblyEnd().
5778: */
5779: static PetscInt MatAssemblyEnd_InUse = 0;
5780: /*@
5781: MatAssemblyBegin - Begins assembling the matrix. This routine should
5782: be called after completing all calls to `MatSetValues()`.
5784: Collective
5786: Input Parameters:
5787: + mat - the matrix
5788: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5790: Level: beginner
5792: Notes:
5793: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5794: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5796: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5797: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5798: using the matrix.
5800: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5801: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5802: a global collective operation requiring all processes that share the matrix.
5804:   Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine is compressed
5805: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5806: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
5808: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5809: @*/
5810: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5811: {
5812: PetscFunctionBegin;
5815: MatCheckPreallocated(mat, 1);
5816: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5817: if (mat->assembled) {
5818: mat->was_assembled = PETSC_TRUE;
5819: mat->assembled = PETSC_FALSE;
5820: }
5822: if (!MatAssemblyEnd_InUse) {
5823: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5824: PetscTryTypeMethod(mat, assemblybegin, type);
5825: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5826: } else PetscTryTypeMethod(mat, assemblybegin, type);
5827: PetscFunctionReturn(PETSC_SUCCESS);
5828: }
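/*
   Illustrative sketch (not part of matrix.c): using a flush assembly to switch between insert modes.
   Assumes mat has been created and preallocated, and that row, col, vadd, vins are defined elsewhere.

     PetscCall(MatSetValue(mat, row, col, vadd, ADD_VALUES));
     PetscCall(MatAssemblyBegin(mat, MAT_FLUSH_ASSEMBLY));   // flush before changing ADD_VALUES to INSERT_VALUES
     PetscCall(MatAssemblyEnd(mat, MAT_FLUSH_ASSEMBLY));
     PetscCall(MatSetValue(mat, row, col, vins, INSERT_VALUES));
     PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));   // final assembly before using the matrix
     PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
*/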
5830: /*@
5831: MatAssembled - Indicates if a matrix has been assembled and is ready for
5832:   use; for example, in matrix-vector products.
5834: Not Collective
5836: Input Parameter:
5837: . mat - the matrix
5839: Output Parameter:
5840: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5842: Level: advanced
5844: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5845: @*/
5846: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5847: {
5848: PetscFunctionBegin;
5850: PetscAssertPointer(assembled, 2);
5851: *assembled = mat->assembled;
5852: PetscFunctionReturn(PETSC_SUCCESS);
5853: }
5855: /*@
5856: MatAssemblyEnd - Completes assembling the matrix. This routine should
5857: be called after `MatAssemblyBegin()`.
5859: Collective
5861: Input Parameters:
5862: + mat - the matrix
5863: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5865: Options Database Keys:
5866: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5867: . -mat_view ::ascii_info_detail - Prints more detailed info
5868: . -mat_view - Prints matrix in ASCII format
5869: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5870: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5871: . -display <name> - Sets display name (default is host)
5872: . -draw_pause <sec> - Sets number of seconds to pause after display
5873: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5874: . -viewer_socket_machine <machine> - Machine to use for socket
5875: . -viewer_socket_port <port> - Port number to use for socket
5876: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5878: Level: beginner
5880: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5881: @*/
5882: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5883: {
5884: static PetscInt inassm = 0;
5885: PetscBool flg = PETSC_FALSE;
5887: PetscFunctionBegin;
5891: inassm++;
5892: MatAssemblyEnd_InUse++;
5893: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5894: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5895: PetscTryTypeMethod(mat, assemblyend, type);
5896: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5897: } else PetscTryTypeMethod(mat, assemblyend, type);
5899: /* Flush assembly is not a true assembly */
5900: if (type != MAT_FLUSH_ASSEMBLY) {
5901: if (mat->num_ass) {
5902: if (!mat->symmetry_eternal) {
5903: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5904: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5905: }
5906: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5907: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5908: }
5909: mat->num_ass++;
5910: mat->assembled = PETSC_TRUE;
5911: mat->ass_nonzerostate = mat->nonzerostate;
5912: }
5914: mat->insertmode = NOT_SET_VALUES;
5915: MatAssemblyEnd_InUse--;
5916: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5917: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5918: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5920: if (mat->checksymmetryonassembly) {
5921: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5922: if (flg) {
5923: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5924: } else {
5925: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5926: }
5927: }
5928: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5929: }
5930: inassm--;
5931: PetscFunctionReturn(PETSC_SUCCESS);
5932: }
5934: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5935: /*@
5936: MatSetOption - Sets a parameter option for a matrix. Some options
5937: may be specific to certain storage formats. Some options
5938: determine how values will be inserted (or added). Sorted,
5939: row-oriented input will generally assemble the fastest. The default
5940: is row-oriented.
5942: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5944: Input Parameters:
5945: + mat - the matrix
5946: . op - the option, one of those listed below (and possibly others),
5947: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5949: Options Describing Matrix Structure:
5950: + `MAT_SPD` - symmetric positive definite
5951: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5952: . `MAT_HERMITIAN` - transpose is the complex conjugation
5953: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5954: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5955: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5956: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5958:   These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that they
5959: do not need to be computed (usually at a high cost)
5961: Options For Use with `MatSetValues()`:
5962: Insert a logically dense subblock, which can be
5963: . `MAT_ROW_ORIENTED` - row-oriented (default)
5965: These options reflect the data you pass in with `MatSetValues()`; it has
5966: nothing to do with how the data is stored internally in the matrix
5967: data structure.
5969: When (re)assembling a matrix, we can restrict the input for
5970: efficiency/debugging purposes. These options include
5971: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5972: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5973: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5974: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5975: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5976: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5977: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5978: performance for very large process counts.
5979: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5980: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5981: functions, instead sending only neighbor messages.
5983: Level: intermediate
5985: Notes:
5986: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
5988: Some options are relevant only for particular matrix types and
5989: are thus ignored by others. Other options are not supported by
5990: certain matrix types and will generate an error message if set.
5992: If using Fortran to compute a matrix, one may need to
5993: use the column-oriented option (or convert to the row-oriented
5994: format).
5996: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
5997: that would generate a new entry in the nonzero structure is instead
5998: ignored. Thus, if memory has not already been allocated for this particular
5999: data, then the insertion is ignored. For dense matrices, in which
6000: the entire array is allocated, no entries are ever ignored.
6001:   Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one less global reduction
6003:   `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6004: that would generate a new entry in the nonzero structure instead produces
6005:   an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one less global reduction
6007: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6008: that would generate a new entry that has not been preallocated will
6009: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
6010: only.) This is a useful flag when debugging matrix memory preallocation.
6011:   If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one less global reduction
6013: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
6014: other processors should be dropped, rather than stashed.
6015: This is useful if you know that the "owning" processor is also
6016: always generating the correct matrix entries, so that PETSc need
6017: not transfer duplicate entries generated on another processor.
6019: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6020: searches during matrix assembly. When this flag is set, the hash table
6021: is created during the first matrix assembly. This hash table is
6022: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6023:   to improve the searching of indices. The `MAT_NEW_NONZERO_LOCATIONS` flag
6024:   should be used with the `MAT_USE_HASH_TABLE` flag. This option is currently
6025: supported by `MATMPIBAIJ` format only.
6027: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6028: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6030: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6031: a zero location in the matrix
6033: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6035: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6036: zero row routines and thus improves performance for very large process counts.
6038: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6039: part of the matrix (since they should match the upper triangular part).
6041: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6042: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6043: with finite difference schemes with non-periodic boundary conditions.
6045: Developer Note:
6046: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6047: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6048: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6049: not changed.
6051: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6052: @*/
6053: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
6054: {
6055: PetscFunctionBegin;
6057: if (op > 0) {
6060: }
6062:   PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Option %d is out of range", (int)op);
6064: switch (op) {
6065: case MAT_FORCE_DIAGONAL_ENTRIES:
6066: mat->force_diagonals = flg;
6067: PetscFunctionReturn(PETSC_SUCCESS);
6068: case MAT_NO_OFF_PROC_ENTRIES:
6069: mat->nooffprocentries = flg;
6070: PetscFunctionReturn(PETSC_SUCCESS);
6071: case MAT_SUBSET_OFF_PROC_ENTRIES:
6072: mat->assembly_subset = flg;
6073: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
6074: #if !defined(PETSC_HAVE_MPIUNI)
6075: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
6076: #endif
6077: mat->stash.first_assembly_done = PETSC_FALSE;
6078: }
6079: PetscFunctionReturn(PETSC_SUCCESS);
6080: case MAT_NO_OFF_PROC_ZERO_ROWS:
6081: mat->nooffproczerorows = flg;
6082: PetscFunctionReturn(PETSC_SUCCESS);
6083: case MAT_SPD:
6084: if (flg) {
6085: mat->spd = PETSC_BOOL3_TRUE;
6086: mat->symmetric = PETSC_BOOL3_TRUE;
6087: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6088: } else {
6089: mat->spd = PETSC_BOOL3_FALSE;
6090: }
6091: break;
6092: case MAT_SYMMETRIC:
6093: mat->symmetric = PetscBoolToBool3(flg);
6094: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6095: #if !defined(PETSC_USE_COMPLEX)
6096: mat->hermitian = PetscBoolToBool3(flg);
6097: #endif
6098: break;
6099: case MAT_HERMITIAN:
6100: mat->hermitian = PetscBoolToBool3(flg);
6101: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6102: #if !defined(PETSC_USE_COMPLEX)
6103: mat->symmetric = PetscBoolToBool3(flg);
6104: #endif
6105: break;
6106: case MAT_STRUCTURALLY_SYMMETRIC:
6107: mat->structurally_symmetric = PetscBoolToBool3(flg);
6108: break;
6109: case MAT_SYMMETRY_ETERNAL:
6110: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
6111: mat->symmetry_eternal = flg;
6112: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
6113: break;
6114: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6115: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
6116: mat->structural_symmetry_eternal = flg;
6117: break;
6118: case MAT_SPD_ETERNAL:
6119: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
6120: mat->spd_eternal = flg;
6121: if (flg) {
6122: mat->structural_symmetry_eternal = PETSC_TRUE;
6123: mat->symmetry_eternal = PETSC_TRUE;
6124: }
6125: break;
6126: case MAT_STRUCTURE_ONLY:
6127: mat->structure_only = flg;
6128: break;
6129: case MAT_SORTED_FULL:
6130: mat->sortedfull = flg;
6131: break;
6132: default:
6133: break;
6134: }
6135: PetscTryTypeMethod(mat, setoption, op, flg);
6136: PetscFunctionReturn(PETSC_SUCCESS);
6137: }
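/*
   Illustrative sketch (not part of matrix.c): declaring known properties of a matrix before assembly.
   Assumes A will always hold a symmetric positive definite operator.

     PetscCall(MatSetOption(A, MAT_SPD, PETSC_TRUE));                        // also implies symmetry and structural symmetry
     PetscCall(MatSetOption(A, MAT_SPD_ETERNAL, PETSC_TRUE));                // the property survives later changes to the values
     PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE)); // error out on entries that were not preallocated
*/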
6139: /*@
6140: MatGetOption - Gets a parameter option that has been set for a matrix.
6142: Logically Collective
6144: Input Parameters:
6145: + mat - the matrix
6146: - op - the option, this only responds to certain options, check the code for which ones
6148: Output Parameter:
6149: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6151: Level: intermediate
6153: Notes:
6154:   Can only be called after `MatSetSizes()` and `MatSetType()` have been called.
6156: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6157: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6159: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6160: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6161: @*/
6162: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6163: {
6164: PetscFunctionBegin;
6168:   PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Option %d is out of range", (int)op);
6169: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6171: switch (op) {
6172: case MAT_NO_OFF_PROC_ENTRIES:
6173: *flg = mat->nooffprocentries;
6174: break;
6175: case MAT_NO_OFF_PROC_ZERO_ROWS:
6176: *flg = mat->nooffproczerorows;
6177: break;
6178: case MAT_SYMMETRIC:
6179: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6180: break;
6181: case MAT_HERMITIAN:
6182: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6183: break;
6184: case MAT_STRUCTURALLY_SYMMETRIC:
6185: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6186: break;
6187: case MAT_SPD:
6188: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6189: break;
6190: case MAT_SYMMETRY_ETERNAL:
6191: *flg = mat->symmetry_eternal;
6192: break;
6193: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6194:     *flg = mat->structural_symmetry_eternal;
6195: break;
6196: default:
6197: break;
6198: }
6199: PetscFunctionReturn(PETSC_SUCCESS);
6200: }
6202: /*@
6203: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6204: this routine retains the old nonzero structure.
6206: Logically Collective
6208: Input Parameter:
6209: . mat - the matrix
6211: Level: intermediate
6213: Note:
6214: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6215: See the Performance chapter of the users manual for information on preallocating matrices.
6217: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6218: @*/
6219: PetscErrorCode MatZeroEntries(Mat mat)
6220: {
6221: PetscFunctionBegin;
6224: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6225: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6226: MatCheckPreallocated(mat, 1);
6228: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6229: PetscUseTypeMethod(mat, zeroentries);
6230: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6231: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6232: PetscFunctionReturn(PETSC_SUCCESS);
6233: }
6235: /*@
6236: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6237: of a set of rows and columns of a matrix.
6239: Collective
6241: Input Parameters:
6242: + mat - the matrix
6243: . numRows - the number of rows/columns to zero
6244: . rows - the global row indices
6245: . diag - value put in the diagonal of the eliminated rows
6246: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6247: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6249: Level: intermediate
6251: Notes:
6252: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6254:   For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6255: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6257: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6258: Krylov method to take advantage of the known solution on the zeroed rows.
6260: For the parallel case, all processes that share the matrix (i.e.,
6261: those in the communicator used for matrix creation) MUST call this
6262: routine, regardless of whether any rows being zeroed are owned by
6263: them.
6265: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`, it merely zeros those entries in the matrix, but never
6266: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6267: missing.
6269: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6270: list only rows local to itself).
6272: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
6274: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6275: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6276: @*/
6277: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6278: {
6279: PetscFunctionBegin;
6282: if (numRows) PetscAssertPointer(rows, 3);
6283: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6284: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6285: MatCheckPreallocated(mat, 1);
6287: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6288: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6289: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6290: PetscFunctionReturn(PETSC_SUCCESS);
6291: }
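/*
   Illustrative sketch (not part of matrix.c): eliminating Dirichlet rows and columns while keeping symmetry.
   Assumes A x = b is assembled, x already holds the known boundary values in the rows listed in bcrows,
   and nbc and bcrows are provided elsewhere.

     PetscCall(MatZeroRowsColumns(A, nbc, bcrows, 1.0, x, b));   // b is adjusted using the known values in x
     // The system can now be solved with KSP; KSPSetInitialGuessNonzero() can exploit the known rows.
*/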
6293: /*@
6294: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6295: of a set of rows and columns of a matrix.
6297: Collective
6299: Input Parameters:
6300: + mat - the matrix
6301: . is - the rows to zero
6302: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6303: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6304: - b - optional vector of right-hand side, that will be adjusted by provided solution
6306: Level: intermediate
6308: Note:
6309: See `MatZeroRowsColumns()` for details on how this routine operates.
6311: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6312: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6313: @*/
6314: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6315: {
6316: PetscInt numRows;
6317: const PetscInt *rows;
6319: PetscFunctionBegin;
6324: PetscCall(ISGetLocalSize(is, &numRows));
6325: PetscCall(ISGetIndices(is, &rows));
6326: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6327: PetscCall(ISRestoreIndices(is, &rows));
6328: PetscFunctionReturn(PETSC_SUCCESS);
6329: }
6331: /*@
6332: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6333: of a set of rows of a matrix.
6335: Collective
6337: Input Parameters:
6338: + mat - the matrix
6339: . numRows - the number of rows to zero
6340: . rows - the global row indices
6341: . diag - value put in the diagonal of the zeroed rows
6342: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6343: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6345: Level: intermediate
6347: Notes:
6348: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6350: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6352: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6353: Krylov method to take advantage of the known solution on the zeroed rows.
6355:   May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns
6356:   from the matrix).
6358: Unlike `MatZeroRowsColumns()` for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure, from the eliminated rows of the matrix
6359: but does not release memory. Because of this removal matrix-vector products with the adjusted matrix will be a bit faster. For the dense
6360: formats this does not alter the nonzero structure.
6362:   If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) has been set, the nonzero structure
6363:   of the matrix is not changed; the values are
6364:   merely zeroed.
6366:   The user can set a value in the diagonal entry (or, for the `MATAIJ`
6367:   format, can optionally remove the main diagonal entry from the
6368:   nonzero structure as well, by passing 0.0 for `diag`).
6370: For the parallel case, all processes that share the matrix (i.e.,
6371: those in the communicator used for matrix creation) MUST call this
6372: routine, regardless of whether any rows being zeroed are owned by
6373: them.
6375: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6376: list only rows local to itself).
6378: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6379: owns that are to be zeroed. This saves a global synchronization in the implementation.
6381: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6382: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6383: @*/
6384: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6385: {
6386: PetscFunctionBegin;
6389: if (numRows) PetscAssertPointer(rows, 3);
6390: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6391: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6392: MatCheckPreallocated(mat, 1);
6394: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6395: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6396: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6397: PetscFunctionReturn(PETSC_SUCCESS);
6398: }
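/*
   Illustrative sketch (not part of matrix.c): zeroing locally owned boundary rows of a linear system.
   Assumes A and b are assembled, the known boundary values are already stored in x, and nbc and bcrows
   list only rows owned by this process.

     PetscCall(MatSetOption(A, MAT_NO_OFF_PROC_ZERO_ROWS, PETSC_TRUE));   // each process lists only rows it owns
     PetscCall(MatZeroRows(A, nbc, bcrows, 1.0, x, b));                   // rows get a unit diagonal; b picks up diag*x
*/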
6400: /*@
6401: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6402: of a set of rows of a matrix indicated by an `IS`
6404: Collective
6406: Input Parameters:
6407: + mat - the matrix
6408: . is - index set, `IS`, of rows to remove (if `NULL` then no row is removed)
6409: . diag - value put in all diagonals of eliminated rows
6410: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6411: - b - optional vector of right-hand side, that will be adjusted by provided solution
6413: Level: intermediate
6415: Note:
6416: See `MatZeroRows()` for details on how this routine operates.
6418: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6419: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `IS`
6420: @*/
6421: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6422: {
6423: PetscInt numRows = 0;
6424: const PetscInt *rows = NULL;
6426: PetscFunctionBegin;
6429: if (is) {
6431: PetscCall(ISGetLocalSize(is, &numRows));
6432: PetscCall(ISGetIndices(is, &rows));
6433: }
6434: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6435: if (is) PetscCall(ISRestoreIndices(is, &rows));
6436: PetscFunctionReturn(PETSC_SUCCESS);
6437: }
6439: /*@
6440: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6441: of a set of rows of a matrix indicated by a `MatStencil`. These rows must be local to the process.
6443: Collective
6445: Input Parameters:
6446: + mat - the matrix
6447: . numRows - the number of rows to remove
6448: . rows - the grid coordinates (and component number when dof > 1) for matrix rows indicated by an array of `MatStencil`
6449: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6450: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6451: - b - optional vector of right-hand side, that will be adjusted by provided solution
6453: Level: intermediate
6455: Notes:
6456: See `MatZeroRows()` for details on how this routine operates.
6458: The grid coordinates are across the entire grid, not just the local portion
6460:   For periodic boundary conditions use negative indices for values to the left of index 0 (these are
6461:   obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6462:   and so on, to obtain values by wrapping from the left edge. This does not work for anything but the
6463:   `DM_BOUNDARY_PERIODIC` boundary type.
6465:   For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
6466:   a single value per point) you can skip filling those indices.
6468: Fortran Note:
6469: `idxm` and `idxn` should be declared as
6470: .vb
6471: MatStencil idxm(4, m)
6472: .ve
6473: and the values inserted using
6474: .vb
6475: idxm(MatStencil_i, 1) = i
6476: idxm(MatStencil_j, 1) = j
6477: idxm(MatStencil_k, 1) = k
6478: idxm(MatStencil_c, 1) = c
6479: etc
6480: .ve
6482: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6483: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6484: @*/
6485: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6486: {
6487: PetscInt dim = mat->stencil.dim;
6488: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6489: PetscInt *dims = mat->stencil.dims + 1;
6490: PetscInt *starts = mat->stencil.starts;
6491: PetscInt *dxm = (PetscInt *)rows;
6492: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6494: PetscFunctionBegin;
6497: if (numRows) PetscAssertPointer(rows, 3);
6499: PetscCall(PetscMalloc1(numRows, &jdxm));
6500: for (i = 0; i < numRows; ++i) {
6501: /* Skip unused dimensions (they are ordered k, j, i, c) */
6502: for (j = 0; j < 3 - sdim; ++j) dxm++;
6503: /* Local index in X dir */
6504: tmp = *dxm++ - starts[0];
6505: /* Loop over remaining dimensions */
6506: for (j = 0; j < dim - 1; ++j) {
6507: /* If nonlocal, set index to be negative */
6508: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6509: /* Update local index */
6510: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6511: }
6512: /* Skip component slot if necessary */
6513: if (mat->stencil.noc) dxm++;
6514: /* Local row number */
6515: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6516: }
6517: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6518: PetscCall(PetscFree(jdxm));
6519: PetscFunctionReturn(PETSC_SUCCESS);
6520: }
6522: /*@
6523: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6524: of a set of rows and columns of a matrix.
6526: Collective
6528: Input Parameters:
6529: + mat - the matrix
6530: . numRows - the number of rows/columns to remove
6531: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6532: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6533: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6534: - b - optional vector of right-hand side, that will be adjusted by provided solution
6536: Level: intermediate
6538: Notes:
6539: See `MatZeroRowsColumns()` for details on how this routine operates.
6541: The grid coordinates are across the entire grid, not just the local portion
6543:   For periodic boundary conditions use negative indices for values to the left of index 0 (these are
6544:   obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6545:   and so on, to obtain values by wrapping from the left edge. This does not work for anything but the
6546:   `DM_BOUNDARY_PERIODIC` boundary type.
6548:   For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
6549:   a single value per point) you can skip filling those indices.
6551: Fortran Note:
6552: `idxm` and `idxn` should be declared as
6553: .vb
6554: MatStencil idxm(4, m)
6555: .ve
6556: and the values inserted using
6557: .vb
6558: idxm(MatStencil_i, 1) = i
6559: idxm(MatStencil_j, 1) = j
6560: idxm(MatStencil_k, 1) = k
6561: idxm(MatStencil_c, 1) = c
6562: etc
6563: .ve
6565: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6566: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6567: @*/
6568: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6569: {
6570: PetscInt dim = mat->stencil.dim;
6571: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6572: PetscInt *dims = mat->stencil.dims + 1;
6573: PetscInt *starts = mat->stencil.starts;
6574: PetscInt *dxm = (PetscInt *)rows;
6575: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6577: PetscFunctionBegin;
6580: if (numRows) PetscAssertPointer(rows, 3);
6582: PetscCall(PetscMalloc1(numRows, &jdxm));
6583: for (i = 0; i < numRows; ++i) {
6584: /* Skip unused dimensions (they are ordered k, j, i, c) */
6585: for (j = 0; j < 3 - sdim; ++j) dxm++;
6586: /* Local index in X dir */
6587: tmp = *dxm++ - starts[0];
6588: /* Loop over remaining dimensions */
6589: for (j = 0; j < dim - 1; ++j) {
6590: /* If nonlocal, set index to be negative */
6591: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6592: /* Update local index */
6593: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6594: }
6595: /* Skip component slot if necessary */
6596: if (mat->stencil.noc) dxm++;
6597: /* Local row number */
6598: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6599: }
6600: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6601: PetscCall(PetscFree(jdxm));
6602: PetscFunctionReturn(PETSC_SUCCESS);
6603: }
6605: /*@
6606: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6607: of a set of rows of a matrix; using local numbering of rows.
6609: Collective
6611: Input Parameters:
6612: + mat - the matrix
6613: . numRows - the number of rows to remove
6614: . rows - the local row indices
6615: . diag - value put in all diagonals of eliminated rows
6616: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6617: - b - optional vector of right-hand side, that will be adjusted by provided solution
6619: Level: intermediate
6621: Notes:
6622: Before calling `MatZeroRowsLocal()`, the user must first set the
6623:   local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6625: See `MatZeroRows()` for details on how this routine operates.
6627: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6628: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6629: @*/
6630: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6631: {
6632: PetscFunctionBegin;
6635: if (numRows) PetscAssertPointer(rows, 3);
6636: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6637: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6638: MatCheckPreallocated(mat, 1);
6640: if (mat->ops->zerorowslocal) {
6641: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6642: } else {
6643: IS is, newis;
6644: const PetscInt *newRows;
6646: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6647: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6648: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6649: PetscCall(ISGetIndices(newis, &newRows));
6650: PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6651: PetscCall(ISRestoreIndices(newis, &newRows));
6652: PetscCall(ISDestroy(&newis));
6653: PetscCall(ISDestroy(&is));
6654: }
6655: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6656: PetscFunctionReturn(PETSC_SUCCESS);
6657: }
6659: /*@
6660: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6661: of a set of rows of a matrix; using local numbering of rows.
6663: Collective
6665: Input Parameters:
6666: + mat - the matrix
6667: . is - index set of rows to remove
6668: . diag - value put in all diagonals of eliminated rows
6669: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6670: - b - optional vector of right-hand side, that will be adjusted by provided solution
6672: Level: intermediate
6674: Notes:
6675: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6676:   local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6678: See `MatZeroRows()` for details on how this routine operates.
6680: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6681: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6682: @*/
6683: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6684: {
6685: PetscInt numRows;
6686: const PetscInt *rows;
6688: PetscFunctionBegin;
6692: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6693: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6694: MatCheckPreallocated(mat, 1);
6696: PetscCall(ISGetLocalSize(is, &numRows));
6697: PetscCall(ISGetIndices(is, &rows));
6698: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6699: PetscCall(ISRestoreIndices(is, &rows));
6700: PetscFunctionReturn(PETSC_SUCCESS);
6701: }
6703: /*@
6704: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6705: of a set of rows and columns of a matrix; using local numbering of rows.
6707: Collective
6709: Input Parameters:
6710: + mat - the matrix
6711: . numRows - the number of rows to remove
6712: . rows - the local row indices
6713: . diag - value put in all diagonals of eliminated rows
6714: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6715: - b - optional vector of right-hand side, that will be adjusted by provided solution
6717: Level: intermediate
6719: Notes:
6720: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6721:   local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6723: See `MatZeroRowsColumns()` for details on how this routine operates.
6725: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6726: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6727: @*/
6728: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6729: {
6730: IS is, newis;
6731: const PetscInt *newRows;
6733: PetscFunctionBegin;
6736: if (numRows) PetscAssertPointer(rows, 3);
6737: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6738: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6739: MatCheckPreallocated(mat, 1);
6741: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6742: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6743: PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6744: PetscCall(ISGetIndices(newis, &newRows));
6745: PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6746: PetscCall(ISRestoreIndices(newis, &newRows));
6747: PetscCall(ISDestroy(&newis));
6748: PetscCall(ISDestroy(&is));
6749: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6750: PetscFunctionReturn(PETSC_SUCCESS);
6751: }
6753: /*@
6754: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6755: of a set of rows and columns of a matrix; using local numbering of rows.
6757: Collective
6759: Input Parameters:
6760: + mat - the matrix
6761: . is - index set of rows to remove
6762: . diag - value put in all diagonals of eliminated rows
6763: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6764: - b - optional vector of right-hand side, that will be adjusted by provided solution
6766: Level: intermediate
6768: Notes:
6769: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6770:   local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6772: See `MatZeroRowsColumns()` for details on how this routine operates.
6774: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6775: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6776: @*/
6777: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6778: {
6779: PetscInt numRows;
6780: const PetscInt *rows;
6782: PetscFunctionBegin;
6786: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6787: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6788: MatCheckPreallocated(mat, 1);
6790: PetscCall(ISGetLocalSize(is, &numRows));
6791: PetscCall(ISGetIndices(is, &rows));
6792: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6793: PetscCall(ISRestoreIndices(is, &rows));
6794: PetscFunctionReturn(PETSC_SUCCESS);
6795: }
6797: /*@
6798: MatGetSize - Returns the numbers of rows and columns in a matrix.
6800: Not Collective
6802: Input Parameter:
6803: . mat - the matrix
6805: Output Parameters:
6806: + m - the number of global rows
6807: - n - the number of global columns
6809: Level: beginner
6811: Note:
6812: Both output parameters can be `NULL` on input.
6814: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6815: @*/
6816: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6817: {
6818: PetscFunctionBegin;
6820: if (m) *m = mat->rmap->N;
6821: if (n) *n = mat->cmap->N;
6822: PetscFunctionReturn(PETSC_SUCCESS);
6823: }
6825: /*@
6826:   MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6827: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6829: Not Collective
6831: Input Parameter:
6832: . mat - the matrix
6834: Output Parameters:
6835: + m - the number of local rows, use `NULL` to not obtain this value
6836: - n - the number of local columns, use `NULL` to not obtain this value
6838: Level: beginner
6840: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6841: @*/
6842: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6843: {
6844: PetscFunctionBegin;
6846: if (m) PetscAssertPointer(m, 2);
6847: if (n) PetscAssertPointer(n, 3);
6848: if (m) *m = mat->rmap->n;
6849: if (n) *n = mat->cmap->n;
6850: PetscFunctionReturn(PETSC_SUCCESS);
6851: }
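/*
   A minimal usage sketch (editorial illustration, not part of the library source): querying the
   global and local dimensions of an already assembled matrix A. The variable names are hypothetical.

     Mat      A;            // assumed created, sized, and assembled elsewhere
     PetscInt M, N, m, n;

     PetscCall(MatGetSize(A, &M, &N));      // global rows and columns
     PetscCall(MatGetLocalSize(A, &m, &n)); // local rows and columns, i.e. the local sizes of MatCreateVecs() vectors
     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "global size %" PetscInt_FMT " x %" PetscInt_FMT "\n", M, N));
*/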
6853: /*@
6854:   MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with the locally owned
6855:   part of a vector one multiplies this matrix by.
6857:   Not Collective, unless the matrix has not been allocated, in which case it is collective
6859: Input Parameter:
6860: . mat - the matrix
6862: Output Parameters:
6863: + m - the global index of the first local column, use `NULL` to not obtain this value
6864: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6866: Level: developer
6868: Notes:
6869: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6871: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6872: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6874: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6875: the local values in the matrix.
6877: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6878: Layouts](sec_matlayout) for details on matrix layouts.
6880: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6881: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6882: @*/
6883: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6884: {
6885: PetscFunctionBegin;
6888: if (m) PetscAssertPointer(m, 2);
6889: if (n) PetscAssertPointer(n, 3);
6890: MatCheckPreallocated(mat, 1);
6891: if (m) *m = mat->cmap->rstart;
6892: if (n) *n = mat->cmap->rend;
6893: PetscFunctionReturn(PETSC_SUCCESS);
6894: }
6896: /*@
6897:   MatGetOwnershipRange - For matrices that own values by row, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6898: this MPI process.
6900: Not Collective
6902: Input Parameter:
6903: . mat - the matrix
6905: Output Parameters:
6906: + m - the global index of the first local row, use `NULL` to not obtain this value
6907: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6909: Level: beginner
6911: Notes:
6912: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6914: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6915: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6917: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6918: the local values in the matrix.
6920: The high argument is one more than the last element stored locally.
6922: For all matrices it returns the range of matrix rows associated with rows of a vector that
6923: would contain the result of a matrix vector product with this matrix. See [Matrix
6924: Layouts](sec_matlayout) for details on matrix layouts.
6926: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
6927: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6928: @*/
6929: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6930: {
6931: PetscFunctionBegin;
6934: if (m) PetscAssertPointer(m, 2);
6935: if (n) PetscAssertPointer(n, 3);
6936: MatCheckPreallocated(mat, 1);
6937: if (m) *m = mat->rmap->rstart;
6938: if (n) *n = mat->rmap->rend;
6939: PetscFunctionReturn(PETSC_SUCCESS);
6940: }
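/*
   A hedged sketch (not part of the library source): using the ownership range to fill the locally
   owned rows of an AIJ matrix A with a 1D Laplacian stencil. A is assumed already created, sized,
   and preallocated; N is its global row count (e.g. from MatGetSize()).

     PetscInt rstart, rend;

     PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
     for (PetscInt row = rstart; row < rend; row++) {
       if (row > 0) PetscCall(MatSetValue(A, row, row - 1, -1.0, INSERT_VALUES));
       PetscCall(MatSetValue(A, row, row, 2.0, INSERT_VALUES));
       if (row < N - 1) PetscCall(MatSetValue(A, row, row + 1, -1.0, INSERT_VALUES));
     }
     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/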
6942: /*@C
6943:   MatGetOwnershipRanges - For matrices that own values by row, excluding `MATELEMENTAL` and
6944: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6946: Not Collective, unless matrix has not been allocated
6948: Input Parameter:
6949: . mat - the matrix
6951: Output Parameter:
6952: . ranges - start of each process's portion, followed by one more than the last global row index at the end; the array has length `size` + 1
6953: where `size` is the number of MPI processes used by `mat`
6955: Level: beginner
6957: Notes:
6958: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6960: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6961: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6963: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6964: the local values in the matrix.
6966: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6967: would contain the result of a matrix vector product with this matrix. See [Matrix
6968: Layouts](sec_matlayout) for details on matrix layouts.
6970: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6971: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
6972: `DMDAGetGhostCorners()`, `DM`
6973: @*/
6974: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
6975: {
6976: PetscFunctionBegin;
6979: MatCheckPreallocated(mat, 1);
6980: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6981: PetscFunctionReturn(PETSC_SUCCESS);
6982: }
6984: /*@C
6985:   MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with the parts of a
6986:   vector one multiplies this matrix by that are owned by each process.
6988: Not Collective, unless matrix has not been allocated
6990: Input Parameter:
6991: . mat - the matrix
6993: Output Parameter:
6994: . ranges - start of each process's portion, followed by one more than the last global column index at the end
6996: Level: beginner
6998: Notes:
6999: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7001: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7002: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
7004: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7005: the local values in the matrix.
7007: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
7008: Layouts](sec_matlayout) for details on matrix layouts.
7010: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
7011: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
7012: `DMDAGetGhostCorners()`, `DM`
7013: @*/
7014: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
7015: {
7016: PetscFunctionBegin;
7019: MatCheckPreallocated(mat, 1);
7020: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
7021: PetscFunctionReturn(PETSC_SUCCESS);
7022: }
7024: /*@
7025:   MatGetOwnershipIS - Get the row and column ownership of a matrix's values as index sets.
7027: Not Collective
7029: Input Parameter:
7030: . A - matrix
7032: Output Parameters:
7033: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
7034: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
7036: Level: intermediate
7038: Note:
7039:   You should call `ISDestroy()` on each returned `IS` when it is no longer needed.
7041: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
7042: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
7043: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
7044: details on matrix layouts.
7046: .seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
7047: @*/
7048: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
7049: {
7050: PetscErrorCode (*f)(Mat, IS *, IS *);
7052: PetscFunctionBegin;
7055: MatCheckPreallocated(A, 1);
7056: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
7057: if (f) {
7058: PetscCall((*f)(A, rows, cols));
7059: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
7060: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
7061: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
7062: }
7063: PetscFunctionReturn(PETSC_SUCCESS);
7064: }
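/*
   A small illustrative sketch (assumptions: A is an assembled matrix; the names are hypothetical):
   obtaining the row and column ownership of A as index sets and releasing them afterwards.

     IS rows, cols;

     PetscCall(MatGetOwnershipIS(A, &rows, &cols));
     PetscCall(ISView(rows, PETSC_VIEWER_STDOUT_SELF));
     PetscCall(ISDestroy(&rows));
     PetscCall(ISDestroy(&cols));
*/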
7066: /*@
7067:   MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`.
7068: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
7069: to complete the factorization.
7071: Collective
7073: Input Parameters:
7074: + fact - the factorized matrix obtained with `MatGetFactor()`
7075: . mat - the matrix
7076: . row - row permutation
7077: . col - column permutation
7078: - info - structure containing
7079: .vb
7080: levels - number of levels of fill.
7081: expected fill - as ratio of original fill.
7082: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
7083: missing diagonal entries)
7084: .ve
7086: Level: developer
7088: Notes:
7089: See [Matrix Factorization](sec_matfactor) for additional information.
7091: Most users should employ the `KSP` interface for linear solvers
7092: instead of working directly with matrix algebra routines such as this.
7093: See, e.g., `KSPCreate()`.
7095: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
7097: Fortran Note:
7098: A valid (non-null) `info` argument must be provided
7100: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
7101: `MatGetOrdering()`, `MatFactorInfo`
7102: @*/
7103: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7104: {
7105: PetscFunctionBegin;
7110: PetscAssertPointer(info, 5);
7111: PetscAssertPointer(fact, 1);
7112: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7113: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7114: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7115: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7116: MatCheckPreallocated(mat, 2);
7118: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7119: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7120: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7121: PetscFunctionReturn(PETSC_SUCCESS);
7122: }
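/*
   A hedged end-to-end sketch of the factorization workflow this routine belongs to (editorial
   illustration; most users should use the KSP/PC interface instead). A is an assembled MATAIJ
   matrix and b, x are conforming vectors; the solver package and ordering are illustrative choices.

     Mat           F;
     IS            row, col;
     MatFactorInfo info;

     PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ILU, &F));
     PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &row, &col));
     PetscCall(MatFactorInfoInitialize(&info));
     info.levels = 0;   // ILU(0)
     info.fill   = 1.0; // expected fill as a ratio of the original nonzeros
     PetscCall(MatILUFactorSymbolic(F, A, row, col, &info));
     PetscCall(MatLUFactorNumeric(F, A, &info));
     PetscCall(MatSolve(F, b, x));
     PetscCall(ISDestroy(&row));
     PetscCall(ISDestroy(&col));
     PetscCall(MatDestroy(&F));
*/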
7124: /*@
7125: MatICCFactorSymbolic - Performs symbolic incomplete
7126: Cholesky factorization for a symmetric matrix. Use
7127: `MatCholeskyFactorNumeric()` to complete the factorization.
7129: Collective
7131: Input Parameters:
7132: + fact - the factorized matrix obtained with `MatGetFactor()`
7133: . mat - the matrix to be factored
7134: . perm - row and column permutation
7135: - info - structure containing
7136: .vb
7137: levels - number of levels of fill.
7138: expected fill - as ratio of original fill.
7139: .ve
7141: Level: developer
7143: Notes:
7144: Most users should employ the `KSP` interface for linear solvers
7145: instead of working directly with matrix algebra routines such as this.
7146: See, e.g., `KSPCreate()`.
7148: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7150: Fortran Note:
7151: A valid (non-null) `info` argument must be provided
7153: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7154: @*/
7155: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7156: {
7157: PetscFunctionBegin;
7161: PetscAssertPointer(info, 4);
7162: PetscAssertPointer(fact, 1);
7163: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7164: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7165: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7166: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7167: MatCheckPreallocated(mat, 2);
7169: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7170: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7171: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7172: PetscFunctionReturn(PETSC_SUCCESS);
7173: }
7175: /*@C
7176: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7177: points to an array of valid matrices, they may be reused to store the new
7178: submatrices.
7180: Collective
7182: Input Parameters:
7183: + mat - the matrix
7184: . n     - the number of submatrices to be extracted (on this processor, may be zero)
7185: . irow - index set of rows to extract
7186: . icol - index set of columns to extract
7187: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7189: Output Parameter:
7190: . submat - the array of submatrices
7192: Level: advanced
7194: Notes:
7195: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7196: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7197: to extract a parallel submatrix.
7199: Some matrix types place restrictions on the row and column
7200: indices, such as that they be sorted or that they be equal to each other.
7202: The index sets may not have duplicate entries.
7204: When extracting submatrices from a parallel matrix, each processor can
7205: form a different submatrix by setting the rows and columns of its
7206: individual index sets according to the local submatrix desired.
7208: When finished using the submatrices, the user should destroy
7209: them with `MatDestroySubMatrices()`.
7211: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7212: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7214: This routine creates the matrices in submat; you should NOT create them before
7215: calling it. It also allocates the array of matrix pointers submat.
7217:   For `MATBAIJ` matrices the index sets must respect the block structure; that is, if they
7218: request one row/column in a block, they must request all rows/columns that are in
7219: that block. For example, if the block size is 2 you cannot request just row 0 and
7220: column 0.
7222: Fortran Note:
7223: .vb
7224: Mat, pointer :: submat(:)
7225: .ve
7227: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7228: @*/
7229: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7230: {
7231: PetscInt i;
7232: PetscBool eq;
7234: PetscFunctionBegin;
7237: if (n) {
7238: PetscAssertPointer(irow, 3);
7240: PetscAssertPointer(icol, 4);
7242: }
7243: PetscAssertPointer(submat, 6);
7244: if (n && scall == MAT_REUSE_MATRIX) {
7245: PetscAssertPointer(*submat, 6);
7247: }
7248: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7249: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7250: MatCheckPreallocated(mat, 1);
7251: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7252: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7253: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7254: for (i = 0; i < n; i++) {
7255: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7256: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7257: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7258: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7259: if (mat->boundtocpu && mat->bindingpropagates) {
7260: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7261: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7262: }
7263: #endif
7264: }
7265: PetscFunctionReturn(PETSC_SUCCESS);
7266: }
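/*
   A minimal sketch (not part of the library source): each process extracts one sequential
   submatrix made of its locally owned rows and the first ncols global columns. A is an assembled
   matrix; ncols and the variable names are hypothetical.

     IS       irow, icol;
     Mat     *submats;
     PetscInt rstart, rend;

     PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
     PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &irow));
     PetscCall(ISCreateStride(PETSC_COMM_SELF, ncols, 0, 1, &icol));
     PetscCall(MatCreateSubMatrices(A, 1, &irow, &icol, MAT_INITIAL_MATRIX, &submats));
     // ... use submats[0], a sequential matrix living on this process ...
     PetscCall(MatDestroySubMatrices(1, &submats));
     PetscCall(ISDestroy(&irow));
     PetscCall(ISDestroy(&icol));
*/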
7268: /*@C
7269: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of `mat` (by pairs of `IS` that may live on subcomms).
7271: Collective
7273: Input Parameters:
7274: + mat - the matrix
7275: . n     - the number of submatrices to be extracted
7276: . irow - index set of rows to extract
7277: . icol - index set of columns to extract
7278: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7280: Output Parameter:
7281: . submat - the array of submatrices
7283: Level: advanced
7285: Note:
7286: This is used by `PCGASM`
7288: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7289: @*/
7290: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7291: {
7292: PetscInt i;
7293: PetscBool eq;
7295: PetscFunctionBegin;
7298: if (n) {
7299: PetscAssertPointer(irow, 3);
7301: PetscAssertPointer(icol, 4);
7303: }
7304: PetscAssertPointer(submat, 6);
7305: if (n && scall == MAT_REUSE_MATRIX) {
7306: PetscAssertPointer(*submat, 6);
7308: }
7309: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7310: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7311: MatCheckPreallocated(mat, 1);
7313: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7314: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7315: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7316: for (i = 0; i < n; i++) {
7317: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7318: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7319: }
7320: PetscFunctionReturn(PETSC_SUCCESS);
7321: }
7323: /*@C
7324: MatDestroyMatrices - Destroys an array of matrices
7326: Collective
7328: Input Parameters:
7329: + n - the number of local matrices
7330: - mat - the matrices (this is a pointer to the array of matrices)
7332: Level: advanced
7334: Notes:
7335: Frees not only the matrices, but also the array that contains the matrices
7337: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7339: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7340: @*/
7341: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7342: {
7343: PetscInt i;
7345: PetscFunctionBegin;
7346: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7347: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7348: PetscAssertPointer(mat, 2);
7350: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7352: /* memory is allocated even if n = 0 */
7353: PetscCall(PetscFree(*mat));
7354: PetscFunctionReturn(PETSC_SUCCESS);
7355: }
7357: /*@C
7358: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7360: Collective
7362: Input Parameters:
7363: + n - the number of local matrices
7364: - mat - the matrices (this is a pointer to the array of matrices, to match the calling sequence of `MatCreateSubMatrices()`)
7366: Level: advanced
7368: Note:
7369: Frees not only the matrices, but also the array that contains the matrices
7371: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7372: @*/
7373: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7374: {
7375: Mat mat0;
7377: PetscFunctionBegin;
7378: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7379: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7380: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7381: PetscAssertPointer(mat, 2);
7383: mat0 = (*mat)[0];
7384: if (mat0 && mat0->ops->destroysubmatrices) {
7385: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7386: } else {
7387: PetscCall(MatDestroyMatrices(n, mat));
7388: }
7389: PetscFunctionReturn(PETSC_SUCCESS);
7390: }
7392: /*@
7393: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7395: Collective
7397: Input Parameter:
7398: . mat - the matrix
7400: Output Parameter:
7401: . matstruct - the sequential matrix with the nonzero structure of `mat`
7403: Level: developer
7405: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7406: @*/
7407: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7408: {
7409: PetscFunctionBegin;
7411: PetscAssertPointer(matstruct, 2);
7414: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7415: MatCheckPreallocated(mat, 1);
7417: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7418: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7419: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7420: PetscFunctionReturn(PETSC_SUCCESS);
7421: }
7423: /*@C
7424: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7426: Collective
7428: Input Parameter:
7429: . mat - the matrix
7431: Level: advanced
7433: Note:
7434:   This is not needed; one can just call `MatDestroy()`
7436: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7437: @*/
7438: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7439: {
7440: PetscFunctionBegin;
7441: PetscAssertPointer(mat, 1);
7442: PetscCall(MatDestroy(mat));
7443: PetscFunctionReturn(PETSC_SUCCESS);
7444: }
7446: /*@
7447: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7448: replaces the index sets by larger ones that represent submatrices with
7449: additional overlap.
7451: Collective
7453: Input Parameters:
7454: + mat - the matrix
7455: . n - the number of index sets
7456: . is  - the array of index sets (these index sets will be changed during the call)
7457: - ov - the additional overlap requested
7459: Options Database Key:
7460: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7462: Level: developer
7464: Note:
7465: The computed overlap preserves the matrix block sizes when the blocks are square.
7466:   That is: if a matrix nonzero for a given block would increase the overlap, all columns associated with
7467: that block are included in the overlap regardless of whether each specific column would increase the overlap.
7469: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7470: @*/
7471: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7472: {
7473: PetscInt i, bs, cbs;
7475: PetscFunctionBegin;
7479: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7480: if (n) {
7481: PetscAssertPointer(is, 3);
7483: }
7484: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7485: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7486: MatCheckPreallocated(mat, 1);
7488: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7489: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7490: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7491: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7492: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7493: if (bs == cbs) {
7494: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7495: }
7496: PetscFunctionReturn(PETSC_SUCCESS);
7497: }
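/*
   A hedged sketch (illustrative only): growing a locally owned row set by one level of overlap,
   roughly what an additive Schwarz style setup does before extracting local submatrices.
   A is an assembled parallel matrix.

     IS       is;
     PetscInt rstart, rend;

     PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
     PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &is));
     PetscCall(MatIncreaseOverlap(A, 1, &is, 1)); // is now also lists neighboring rows/columns
     // ... pass &is as irow and icol to MatCreateSubMatrices() to build the overlapping local problem ...
     PetscCall(ISDestroy(&is));
*/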
7499: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7501: /*@
7502: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7503: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7504: additional overlap.
7506: Collective
7508: Input Parameters:
7509: + mat - the matrix
7510: . n - the number of index sets
7511: . is  - the array of index sets (these index sets will be changed during the call)
7512: - ov - the additional overlap requested
7514:   Options Database Key:
7515: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7517: Level: developer
7519: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7520: @*/
7521: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7522: {
7523: PetscInt i;
7525: PetscFunctionBegin;
7528: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7529: if (n) {
7530: PetscAssertPointer(is, 3);
7532: }
7533: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7534: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7535: MatCheckPreallocated(mat, 1);
7536: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7537: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7538: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7539: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7540: PetscFunctionReturn(PETSC_SUCCESS);
7541: }
7543: /*@
7544: MatGetBlockSize - Returns the matrix block size.
7546: Not Collective
7548: Input Parameter:
7549: . mat - the matrix
7551: Output Parameter:
7552: . bs - block size
7554: Level: intermediate
7556: Notes:
7557:   Block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7559: If the block size has not been set yet this routine returns 1.
7561: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7562: @*/
7563: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7564: {
7565: PetscFunctionBegin;
7567: PetscAssertPointer(bs, 2);
7568: *bs = mat->rmap->bs;
7569: PetscFunctionReturn(PETSC_SUCCESS);
7570: }
7572: /*@
7573: MatGetBlockSizes - Returns the matrix block row and column sizes.
7575: Not Collective
7577: Input Parameter:
7578: . mat - the matrix
7580: Output Parameters:
7581: + rbs - row block size
7582: - cbs - column block size
7584: Level: intermediate
7586: Notes:
7587:   Block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7588: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7590: If a block size has not been set yet this routine returns 1.
7592: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7593: @*/
7594: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7595: {
7596: PetscFunctionBegin;
7598: if (rbs) PetscAssertPointer(rbs, 2);
7599: if (cbs) PetscAssertPointer(cbs, 3);
7600: if (rbs) *rbs = mat->rmap->bs;
7601: if (cbs) *cbs = mat->cmap->bs;
7602: PetscFunctionReturn(PETSC_SUCCESS);
7603: }
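/*
   A brief sketch (not part of the library source): querying the block sizes of an existing
   matrix A; both routines report 1 if no block size has been set.

     PetscInt bs, rbs, cbs;

     PetscCall(MatGetBlockSize(A, &bs));         // single (row) block size
     PetscCall(MatGetBlockSizes(A, &rbs, &cbs)); // row and column block sizes
*/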
7605: /*@
7606: MatSetBlockSize - Sets the matrix block size.
7608: Logically Collective
7610: Input Parameters:
7611: + mat - the matrix
7612: - bs - block size
7614: Level: intermediate
7616: Notes:
7617:   Block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7618:   This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or the block size will default to 1), and the block size cannot be changed later.
7620: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7621: is compatible with the matrix local sizes.
7623: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7624: @*/
7625: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7626: {
7627: PetscFunctionBegin;
7630: PetscCall(MatSetBlockSizes(mat, bs, bs));
7631: PetscFunctionReturn(PETSC_SUCCESS);
7632: }
7634: typedef struct {
7635: PetscInt n;
7636: IS *is;
7637: Mat *mat;
7638: PetscObjectState nonzerostate;
7639: Mat C;
7640: } EnvelopeData;
7642: static PetscErrorCode EnvelopeDataDestroy(void **ptr)
7643: {
7644: EnvelopeData *edata = (EnvelopeData *)*ptr;
7646: PetscFunctionBegin;
7647: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7648: PetscCall(PetscFree(edata->is));
7649: PetscCall(PetscFree(edata));
7650: PetscFunctionReturn(PETSC_SUCCESS);
7651: }
7653: /*@
7654:   MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal, this computes and stores
7655: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7657: Collective
7659: Input Parameter:
7660: . mat - the matrix
7662: Level: intermediate
7664: Notes:
7665: There can be zeros within the blocks
7667:   The blocks can overlap between processes, including lying on more than two processes
7669: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7670: @*/
7671: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7672: {
7673: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7674: PetscInt *diag, *odiag, sc;
7675: VecScatter scatter;
7676: PetscScalar *seqv;
7677: const PetscScalar *parv;
7678: const PetscInt *ia, *ja;
7679: PetscBool set, flag, done;
7680: Mat AA = mat, A;
7681: MPI_Comm comm;
7682: PetscMPIInt rank, size, tag;
7683: MPI_Status status;
7684: PetscContainer container;
7685: EnvelopeData *edata;
7686: Vec seq, par;
7687: IS isglobal;
7689: PetscFunctionBegin;
7691: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7692: if (!set || !flag) {
7693: /* TODO: only needs nonzero structure of transpose */
7694: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7695: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7696: }
7697: PetscCall(MatAIJGetLocalMat(AA, &A));
7698: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7699: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7701: PetscCall(MatGetLocalSize(mat, &n, NULL));
7702: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7703: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7704: PetscCallMPI(MPI_Comm_size(comm, &size));
7705: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7707: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7709: if (rank > 0) {
7710: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7711: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7712: }
7713: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7714: for (i = 0; i < n; i++) {
7715: env = PetscMax(env, ja[ia[i + 1] - 1]);
7716: II = rstart + i;
7717: if (env == II) {
7718: starts[lblocks] = tbs;
7719: sizes[lblocks++] = 1 + II - tbs;
7720: tbs = 1 + II;
7721: }
7722: }
7723: if (rank < size - 1) {
7724: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7725: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7726: }
7728: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7729: if (!set || !flag) PetscCall(MatDestroy(&AA));
7730: PetscCall(MatDestroy(&A));
7732: PetscCall(PetscNew(&edata));
7733: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7734: edata->n = lblocks;
7735: /* create IS needed for extracting blocks from the original matrix */
7736: PetscCall(PetscMalloc1(lblocks, &edata->is));
7737: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7739: /* Create the resulting inverse matrix nonzero structure with preallocation information */
7740: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7741: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7742: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7743: PetscCall(MatSetType(edata->C, MATAIJ));
7745: /* Communicate the start and end of each row, from each block to the correct rank */
7746: /* TODO: Use PetscSF instead of VecScatter */
7747: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7748: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7749: PetscCall(VecGetArrayWrite(seq, &seqv));
7750: for (PetscInt i = 0; i < lblocks; i++) {
7751: for (PetscInt j = 0; j < sizes[i]; j++) {
7752: seqv[cnt] = starts[i];
7753: seqv[cnt + 1] = starts[i] + sizes[i];
7754: cnt += 2;
7755: }
7756: }
7757: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7758: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7759: sc -= cnt;
7760: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7761: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7762: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7763: PetscCall(ISDestroy(&isglobal));
7764: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7765: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7766: PetscCall(VecScatterDestroy(&scatter));
7767: PetscCall(VecDestroy(&seq));
7768: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7769: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7770: PetscCall(VecGetArrayRead(par, &parv));
7771: cnt = 0;
7772: PetscCall(MatGetSize(mat, NULL, &n));
7773: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7774: PetscInt start, end, d = 0, od = 0;
7776: start = (PetscInt)PetscRealPart(parv[cnt]);
7777: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7778: cnt += 2;
7780: if (start < cstart) {
7781: od += cstart - start + n - cend;
7782: d += cend - cstart;
7783: } else if (start < cend) {
7784: od += n - cend;
7785: d += cend - start;
7786: } else od += n - start;
7787: if (end <= cstart) {
7788: od -= cstart - end + n - cend;
7789: d -= cend - cstart;
7790: } else if (end < cend) {
7791: od -= n - cend;
7792: d -= cend - end;
7793: } else od -= n - end;
7795: odiag[i] = od;
7796: diag[i] = d;
7797: }
7798: PetscCall(VecRestoreArrayRead(par, &parv));
7799: PetscCall(VecDestroy(&par));
7800: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7801: PetscCall(PetscFree2(diag, odiag));
7802: PetscCall(PetscFree2(sizes, starts));
7804: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7805: PetscCall(PetscContainerSetPointer(container, edata));
7806: PetscCall(PetscContainerSetCtxDestroy(container, EnvelopeDataDestroy));
7807: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7808: PetscCall(PetscObjectDereference((PetscObject)container));
7809: PetscFunctionReturn(PETSC_SUCCESS);
7810: }
7812: /*@
7813:   MatInvertVariableBlockEnvelope - Sets matrix `C` to be the inverted block diagonal of matrix `A`
7815: Collective
7817: Input Parameters:
7818: + A - the matrix
7819: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7821: Output Parameter:
7822: . C - matrix with inverted block diagonal of `A`
7824: Level: advanced
7826: Note:
7827: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7829: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7830: @*/
7831: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7832: {
7833: PetscContainer container;
7834: EnvelopeData *edata;
7835: PetscObjectState nonzerostate;
7837: PetscFunctionBegin;
7838: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7839: if (!container) {
7840: PetscCall(MatComputeVariableBlockEnvelope(A));
7841: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7842: }
7843: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7844: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7845: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7846: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7848: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7849: *C = edata->C;
7851: for (PetscInt i = 0; i < edata->n; i++) {
7852: Mat D;
7853: PetscScalar *dvalues;
7855: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7856: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7857: PetscCall(MatSeqDenseInvert(D));
7858: PetscCall(MatDenseGetArray(D, &dvalues));
7859: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7860: PetscCall(MatDestroy(&D));
7861: }
7862: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7863: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7864: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7865: PetscFunctionReturn(PETSC_SUCCESS);
7866: }
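/*
   A cautious usage sketch (illustrative; the variable names are hypothetical): inverting the
   variable block-diagonal envelope of A and applying it to a vector, roughly what a variable
   point-block Jacobi setup does. r and z are conforming vectors.

     Mat Dinv;

     PetscCall(MatInvertVariableBlockEnvelope(A, MAT_INITIAL_MATRIX, &Dinv));
     PetscCall(MatMult(Dinv, r, z)); // z = (variable block diagonal of A)^{-1} r
     // pass MAT_REUSE_MATRIX and the same Dinv on later calls after the values of A change
*/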
7868: /*@
7869:   MatSetVariableBlockSizes - Sets the sizes of the diagonal point-blocks of the matrix; they need not all be the same size
7871: Not Collective
7873: Input Parameters:
7874: + mat - the matrix
7875: . nblocks - the number of blocks on this process; each block can only exist on a single process
7876: - bsizes - the block sizes
7878: Level: intermediate
7880: Notes:
7881: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7883: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
7885: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7886: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7887: @*/
7888: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
7889: {
7890: PetscInt ncnt = 0, nlocal;
7892: PetscFunctionBegin;
7894: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7895: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
7896: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
7897: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7898: PetscCall(PetscFree(mat->bsizes));
7899: mat->nblocks = nblocks;
7900: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7901: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7902: PetscFunctionReturn(PETSC_SUCCESS);
7903: }
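/*
   A minimal sketch (editorial, not part of the library source): declaring three variable-sized
   diagonal point-blocks on a process that owns 6 rows (2 + 3 + 1 = 6), e.g. before using PCVPBJACOBI.

     const PetscInt bsizes[] = {2, 3, 1}; // must sum to the local row count of A

     PetscCall(MatSetVariableBlockSizes(A, 3, bsizes));
*/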
7905: /*@C
7906:   MatGetVariableBlockSizes - Gets the sizes of the diagonal point-blocks of the matrix; they need not all be the same size
7908: Not Collective; No Fortran Support
7910: Input Parameter:
7911: . mat - the matrix
7913: Output Parameters:
7914: + nblocks - the number of blocks on this process
7915: - bsizes - the block sizes
7917: Level: intermediate
7919: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7920: @*/
7921: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
7922: {
7923: PetscFunctionBegin;
7925: if (nblocks) *nblocks = mat->nblocks;
7926: if (bsizes) *bsizes = mat->bsizes;
7927: PetscFunctionReturn(PETSC_SUCCESS);
7928: }
7930: /*@
7931: MatSetBlockSizes - Sets the matrix block row and column sizes.
7933: Logically Collective
7935: Input Parameters:
7936: + mat - the matrix
7937: . rbs - row block size
7938: - cbs - column block size
7940: Level: intermediate
7942: Notes:
7943: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7944: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7945:   This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or the block sizes will default to 1), and the block sizes cannot be changed later.
7947: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
7948: are compatible with the matrix local sizes.
7950: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
7952: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7953: @*/
7954: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
7955: {
7956: PetscFunctionBegin;
7960: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
7961: if (mat->rmap->refcnt) {
7962: ISLocalToGlobalMapping l2g = NULL;
7963: PetscLayout nmap = NULL;
7965: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
7966: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
7967: PetscCall(PetscLayoutDestroy(&mat->rmap));
7968: mat->rmap = nmap;
7969: mat->rmap->mapping = l2g;
7970: }
7971: if (mat->cmap->refcnt) {
7972: ISLocalToGlobalMapping l2g = NULL;
7973: PetscLayout nmap = NULL;
7975: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
7976: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
7977: PetscCall(PetscLayoutDestroy(&mat->cmap));
7978: mat->cmap = nmap;
7979: mat->cmap->mapping = l2g;
7980: }
7981: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
7982: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
7983: PetscFunctionReturn(PETSC_SUCCESS);
7984: }
7986: /*@
7987: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7989: Logically Collective
7991: Input Parameters:
7992: + mat - the matrix
7993: . fromRow - matrix from which to copy row block size
7994: - fromCol - matrix from which to copy column block size (can be same as fromRow)
7996: Level: developer
7998: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
7999: @*/
8000: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
8001: {
8002: PetscFunctionBegin;
8006: PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
8007: PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
8008: PetscFunctionReturn(PETSC_SUCCESS);
8009: }
8011: /*@
8012: MatResidual - Default routine to calculate the residual r = b - Ax
8014: Collective
8016: Input Parameters:
8017: + mat - the matrix
8018: . b - the right-hand-side
8019: - x - the approximate solution
8021: Output Parameter:
8022: . r - location to store the residual
8024: Level: developer
8026: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8027: @*/
8028: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8029: {
8030: PetscFunctionBegin;
8036: MatCheckPreallocated(mat, 1);
8037: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8038: if (!mat->ops->residual) {
8039: PetscCall(MatMult(mat, x, r));
8040: PetscCall(VecAYPX(r, -1.0, b));
8041: } else {
8042: PetscUseTypeMethod(mat, residual, b, x, r);
8043: }
8044: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8045: PetscFunctionReturn(PETSC_SUCCESS);
8046: }
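/*
   A hedged sketch (illustrative only): computing the residual of an approximate solution and its
   2-norm. A, b, and x are assumed to conform; r is created in the row (range) space of A.

     Vec       r;
     PetscReal rnorm;

     PetscCall(MatCreateVecs(A, NULL, &r)); // second argument gives a vector for the result of MatMult()
     PetscCall(MatResidual(A, b, x, r));    // r = b - A x
     PetscCall(VecNorm(r, NORM_2, &rnorm));
     PetscCall(VecDestroy(&r));
*/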
8048: /*@C
8049: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8051: Collective
8053: Input Parameters:
8054: + mat - the matrix
8055: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8056: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8057: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8058: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8059: always used.
8061: Output Parameters:
8062: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8063: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8064: . ja - the column indices, use `NULL` if not needed
8065: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8066: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8068: Level: developer
8070: Notes:
8071: You CANNOT change any of the ia[] or ja[] values.
8073: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
8075: Fortran Notes:
8076: Use
8077: .vb
8078: PetscInt, pointer :: ia(:),ja(:)
8079: call MatGetRowIJ(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8080: ! Access the ith and jth entries via ia(i) and ja(j)
8081: .ve
8083: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8084: @*/
8085: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8086: {
8087: PetscFunctionBegin;
8090: if (n) PetscAssertPointer(n, 5);
8091: if (ia) PetscAssertPointer(ia, 6);
8092: if (ja) PetscAssertPointer(ja, 7);
8093: if (done) PetscAssertPointer(done, 8);
8094: MatCheckPreallocated(mat, 1);
8095: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8096: else {
8097: if (done) *done = PETSC_TRUE;
8098: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8099: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8100: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8101: }
8102: PetscFunctionReturn(PETSC_SUCCESS);
8103: }
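/*
   A cautious sketch (not part of the library source): reading the CSR structure of a sequential
   AIJ matrix A and restoring it. The done flag must be checked, since not every matrix type
   supports this form of access.

     PetscInt        nrows;
     const PetscInt *ia, *ja;
     PetscBool       done;

     PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done));
     if (done) {
       for (PetscInt i = 0; i < nrows; i++) {
         // the column indices of row i are ja[ia[i]] .. ja[ia[i+1]-1]
       }
     }
     PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done));
*/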
8105: /*@C
8106: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8108: Collective
8110: Input Parameters:
8111: + mat - the matrix
8112: . shift            - 0 or 1 indicating we want the indices starting at 0 or 1
8113: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8114: symmetrized
8115: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8116: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8117: always used.
8119: Output Parameters:
8120: + n - number of columns in the (possibly compressed) matrix
8121: . ia   - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that column of the matrix
8122: . ja - the row indices
8123: - done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8125: Level: developer
8127: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8128: @*/
8129: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8130: {
8131: PetscFunctionBegin;
8134: PetscAssertPointer(n, 5);
8135: if (ia) PetscAssertPointer(ia, 6);
8136: if (ja) PetscAssertPointer(ja, 7);
8137: PetscAssertPointer(done, 8);
8138: MatCheckPreallocated(mat, 1);
8139: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8140: else {
8141: *done = PETSC_TRUE;
8142: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8143: }
8144: PetscFunctionReturn(PETSC_SUCCESS);
8145: }
8147: /*@C
8148: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8150: Collective
8152: Input Parameters:
8153: + mat - the matrix
8154: . shift            - 0 or 1 indicating we want the indices starting at 0 or 1
8155: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8156: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8157: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8158: always used.
8159: . n - size of (possibly compressed) matrix
8160: . ia - the row pointers
8161: - ja - the column indices
8163: Output Parameter:
8164: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8166: Level: developer
8168: Note:
8169: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8170:   use of the arrays after they have been restored. If you pass `NULL`, it will
8171: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8173: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8174: @*/
8175: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8176: {
8177: PetscFunctionBegin;
8180: if (ia) PetscAssertPointer(ia, 6);
8181: if (ja) PetscAssertPointer(ja, 7);
8182: if (done) PetscAssertPointer(done, 8);
8183: MatCheckPreallocated(mat, 1);
8185: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8186: else {
8187: if (done) *done = PETSC_TRUE;
8188: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8189: if (n) *n = 0;
8190: if (ia) *ia = NULL;
8191: if (ja) *ja = NULL;
8192: }
8193: PetscFunctionReturn(PETSC_SUCCESS);
8194: }
8196: /*@C
8197: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8199: Collective
8201: Input Parameters:
8202: + mat - the matrix
8203: . shift            - 0 or 1 indicating we want the indices starting at 0 or 1
8204: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8205: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8206: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8207: always used.
8209: Output Parameters:
8210: + n - size of (possibly compressed) matrix
8211: . ia - the column pointers
8212: . ja - the row indices
8213: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8215: Level: developer
8217: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8218: @*/
8219: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8220: {
8221: PetscFunctionBegin;
8224: if (ia) PetscAssertPointer(ia, 6);
8225: if (ja) PetscAssertPointer(ja, 7);
8226: PetscAssertPointer(done, 8);
8227: MatCheckPreallocated(mat, 1);
8229: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8230: else {
8231: *done = PETSC_TRUE;
8232: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8233: if (n) *n = 0;
8234: if (ia) *ia = NULL;
8235: if (ja) *ja = NULL;
8236: }
8237: PetscFunctionReturn(PETSC_SUCCESS);
8238: }
8240: /*@
8241: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8242: `MatGetColumnIJ()`.
8244: Collective
8246: Input Parameters:
8247: + mat - the matrix
8248: . ncolors - maximum color value
8249: . n - number of entries in colorarray
8250: - colorarray - array indicating color for each column
8252: Output Parameter:
8253: . iscoloring - coloring generated using colorarray information
8255: Level: developer
8257: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8258: @*/
8259: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8260: {
8261: PetscFunctionBegin;
8264: PetscAssertPointer(colorarray, 4);
8265: PetscAssertPointer(iscoloring, 5);
8266: MatCheckPreallocated(mat, 1);
8268: if (!mat->ops->coloringpatch) {
8269: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8270: } else {
8271: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8272: }
8273: PetscFunctionReturn(PETSC_SUCCESS);
8274: }
8276: /*@
8277: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8279: Logically Collective
8281: Input Parameter:
8282: . mat - the factored matrix to be reset
8284: Level: developer
8286: Notes:
8287: This routine should be used only with factored matrices formed by in-place
8288: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8289: format). This option can save memory, for example, when solving nonlinear
8290: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8291: ILU(0) preconditioner.
8293: One can specify in-place ILU(0) factorization by calling
8294: .vb
8295: PCType(pc,PCILU);
8296: PCFactorSeUseInPlace(pc);
8297: .ve
8298: or by using the options -pc_type ilu -pc_factor_in_place
8300: In-place factorization ILU(0) can also be used as a local
8301: solver for the blocks within the block Jacobi or additive Schwarz
8302: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8303: for details on setting local solver options.
8305: Most users should employ the `KSP` interface for linear solvers
8306: instead of working directly with matrix algebra routines such as this.
8307: See, e.g., `KSPCreate()`.
8309: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8310: @*/
8311: PetscErrorCode MatSetUnfactored(Mat mat)
8312: {
8313: PetscFunctionBegin;
8316: MatCheckPreallocated(mat, 1);
8317: mat->factortype = MAT_FACTOR_NONE;
8318: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8319: PetscUseTypeMethod(mat, setunfactored);
8320: PetscFunctionReturn(PETSC_SUCCESS);
8321: }
8323: /*@
8324: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8325: as the original matrix.
8327: Collective
8329: Input Parameters:
8330: + mat - the original matrix
8331: . isrow - parallel `IS` containing the rows this processor should obtain
8332: . iscol  - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" in the new matrix.
8333: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8335: Output Parameter:
8336: . newmat - the new submatrix, of the same type as the original matrix
8338: Level: advanced
8340: Notes:
8341:    The resulting submatrix can be multiplied with vectors that use the same layout as `iscol`.
8343: Some matrix types place restrictions on the row and column indices, such
8344: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8345: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8347: The index sets may not have duplicate entries.
8349:    The first time this is called you should use a `cll` of `MAT_INITIAL_MATRIX`;
8350:    the `MatCreateSubMatrix()` routine will create `newmat` for you. Any additional calls
8351:    to this routine with a `mat` of the same nonzero structure and with a `cll` of `MAT_REUSE_MATRIX`
8352: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8353: you are finished using it.
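   A typical calling sequence is shown below (a minimal sketch; the index sets, matrix assembly, and error checking are elided)
.vb
   Mat sub = NULL;
   MatCreateSubMatrix(mat, isrow, iscol, MAT_INITIAL_MATRIX, &sub);  // creates sub
   // ... change values in mat, keeping the same nonzero structure ...
   MatCreateSubMatrix(mat, isrow, iscol, MAT_REUSE_MATRIX, &sub);    // refills sub
   MatDestroy(&sub);
.ve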
8355: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8356: the input matrix.
8358: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8360: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8361: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8363: Example usage:
8364: Consider the following 8x8 matrix with 34 non-zero values, that is
8365: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8366: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8367: as follows
8368: .vb
8369: 1 2 0 | 0 3 0 | 0 4
8370: Proc0 0 5 6 | 7 0 0 | 8 0
8371: 9 0 10 | 11 0 0 | 12 0
8372: -------------------------------------
8373: 13 0 14 | 15 16 17 | 0 0
8374: Proc1 0 18 0 | 19 20 21 | 0 0
8375: 0 0 0 | 22 23 0 | 24 0
8376: -------------------------------------
8377: Proc2 25 26 27 | 0 0 28 | 29 0
8378: 30 0 0 | 31 32 33 | 0 34
8379: .ve
8381: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8383: .vb
8384: 2 0 | 0 3 0 | 0
8385: Proc0 5 6 | 7 0 0 | 8
8386: -------------------------------
8387: Proc1 18 0 | 19 20 21 | 0
8388: -------------------------------
8389: Proc2 26 27 | 0 0 28 | 29
8390: 0 0 | 31 32 33 | 0
8391: .ve
8393: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8394: @*/
8395: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8396: {
8397: PetscMPIInt size;
8398: Mat *local;
8399: IS iscoltmp;
8400: PetscBool flg;
8402: PetscFunctionBegin;
8406: PetscAssertPointer(newmat, 5);
8409: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8410: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8412: MatCheckPreallocated(mat, 1);
8413: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8415: if (!iscol || isrow == iscol) {
8416: PetscBool stride;
8417: PetscMPIInt grabentirematrix = 0, grab;
8418: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8419: if (stride) {
8420: PetscInt first, step, n, rstart, rend;
8421: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8422: if (step == 1) {
8423: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8424: if (rstart == first) {
8425: PetscCall(ISGetLocalSize(isrow, &n));
8426: if (n == rend - rstart) grabentirematrix = 1;
8427: }
8428: }
8429: }
8430: PetscCallMPI(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8431: if (grab) {
8432: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8433: if (cll == MAT_INITIAL_MATRIX) {
8434: *newmat = mat;
8435: PetscCall(PetscObjectReference((PetscObject)mat));
8436: }
8437: PetscFunctionReturn(PETSC_SUCCESS);
8438: }
8439: }
8441: if (!iscol) {
8442: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8443: } else {
8444: iscoltmp = iscol;
8445: }
8447: /* if original matrix is on just one processor then use submatrix generated */
8448: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8449: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8450: goto setproperties;
8451: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8452: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8453: *newmat = *local;
8454: PetscCall(PetscFree(local));
8455: goto setproperties;
8456: } else if (!mat->ops->createsubmatrix) {
8457: /* Create a new matrix type that implements the operation using the full matrix */
8458: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8459: switch (cll) {
8460: case MAT_INITIAL_MATRIX:
8461: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8462: break;
8463: case MAT_REUSE_MATRIX:
8464: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8465: break;
8466: default:
8467: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8468: }
8469: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8470: goto setproperties;
8471: }
8473: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8474: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8475: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8477: setproperties:
8478: if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
8479: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8480: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8481: }
8482: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8483: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8484: PetscFunctionReturn(PETSC_SUCCESS);
8485: }
8487: /*@
8488: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8490: Not Collective
8492: Input Parameters:
8493: + A - the matrix we wish to propagate options from
8494: - B - the matrix we wish to propagate options to
8496: Level: beginner
8498: Note:
8499:    Propagates the options associated with `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8501: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8502: @*/
8503: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8504: {
8505: PetscFunctionBegin;
8508: B->symmetry_eternal = A->symmetry_eternal;
8509: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8510: B->symmetric = A->symmetric;
8511: B->structurally_symmetric = A->structurally_symmetric;
8512: B->spd = A->spd;
8513: B->hermitian = A->hermitian;
8514: PetscFunctionReturn(PETSC_SUCCESS);
8515: }
8517: /*@
8518: MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8519: used during the assembly process to store values that belong to
8520: other processors.
8522: Not Collective
8524: Input Parameters:
8525: + mat - the matrix
8526: . size - the initial size of the stash.
8527: - bsize - the initial size of the block-stash (if used).
8529: Options Database Keys:
8530: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8531: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8533: Level: intermediate
8535: Notes:
8536: The block-stash is used for values set with `MatSetValuesBlocked()` while
8537: the stash is used for values set with `MatSetValues()`
8539: Run with the option -info and look for output of the form
8540: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8541: to determine the appropriate value, MM, to use for size and
8542: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8543:    to determine the value, BMM, to use for bsize.
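   For example, if the -info output reports that roughly 12000 entries were stashed, one might preallocate the stash as follows (a minimal sketch; the sizes are illustrative values only)
.vb
   MatStashSetInitialSize(mat, 12000, 1000);
.ve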
8545: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8546: @*/
8547: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8548: {
8549: PetscFunctionBegin;
8552: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8553: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8554: PetscFunctionReturn(PETSC_SUCCESS);
8555: }
8557: /*@
8558:    MatInterpolateAdd - $w = y + A*x$ or $w = y + A^T*x$ depending on the shape of
8559:    the matrix
8561: Neighbor-wise Collective
8563: Input Parameters:
8564: + A - the matrix
8565: . x - the vector to be multiplied by the interpolation operator
8566: - y - the vector to be added to the result
8568: Output Parameter:
8569: . w - the resulting vector
8571: Level: intermediate
8573: Notes:
8574: `w` may be the same vector as `y`.
8576: This allows one to use either the restriction or interpolation (its transpose)
8577: matrix to do the interpolation
8579: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8580: @*/
8581: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8582: {
8583: PetscInt M, N, Ny;
8585: PetscFunctionBegin;
8590: PetscCall(MatGetSize(A, &M, &N));
8591: PetscCall(VecGetSize(y, &Ny));
8592: if (M == Ny) {
8593: PetscCall(MatMultAdd(A, x, y, w));
8594: } else {
8595: PetscCall(MatMultTransposeAdd(A, x, y, w));
8596: }
8597: PetscFunctionReturn(PETSC_SUCCESS);
8598: }
8600: /*@
8601:    MatInterpolate - $y = A*x$ or $y = A^T*x$ depending on the shape of
8602:    the matrix
8604: Neighbor-wise Collective
8606: Input Parameters:
8607: + A - the matrix
8608: - x - the vector to be interpolated
8610: Output Parameter:
8611: . y - the resulting vector
8613: Level: intermediate
8615: Note:
8616: This allows one to use either the restriction or interpolation (its transpose)
8617: matrix to do the interpolation
8619: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8620: @*/
8621: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8622: {
8623: PetscInt M, N, Ny;
8625: PetscFunctionBegin;
8629: PetscCall(MatGetSize(A, &M, &N));
8630: PetscCall(VecGetSize(y, &Ny));
8631: if (M == Ny) {
8632: PetscCall(MatMult(A, x, y));
8633: } else {
8634: PetscCall(MatMultTranspose(A, x, y));
8635: }
8636: PetscFunctionReturn(PETSC_SUCCESS);
8637: }
8639: /*@
8640: MatRestrict - $y = A*x$ or $A^T*x$
8642: Neighbor-wise Collective
8644: Input Parameters:
8645: + A - the matrix
8646: - x - the vector to be restricted
8648: Output Parameter:
8649: . y - the resulting vector
8651: Level: intermediate
8653: Note:
8654: This allows one to use either the restriction or interpolation (its transpose)
8655: matrix to do the restriction
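   For example, with an interpolation matrix `P` whose rows correspond to the fine grid and whose columns correspond to the coarse grid (a minimal sketch; `P`, `xc`, and `xf` are assumed to be created elsewhere with compatible sizes)
.vb
   MatInterpolate(P, xc, xf);  // xf = P * xc    (coarse to fine)
   MatRestrict(P, xf, xc);     // xc = P^T * xf  (fine to coarse)
.ve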
8657: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8658: @*/
8659: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8660: {
8661: PetscInt M, N, Nx;
8663: PetscFunctionBegin;
8667: PetscCall(MatGetSize(A, &M, &N));
8668: PetscCall(VecGetSize(x, &Nx));
8669: if (M == Nx) {
8670: PetscCall(MatMultTranspose(A, x, y));
8671: } else {
8672: PetscCall(MatMult(A, x, y));
8673: }
8674: PetscFunctionReturn(PETSC_SUCCESS);
8675: }
8677: /*@
8678: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8680: Neighbor-wise Collective
8682: Input Parameters:
8683: + A - the matrix
8684: . x - the input dense matrix to be multiplied
8685: - w - the input dense matrix to be added to the result
8687: Output Parameter:
8688: . y - the output dense matrix
8690: Level: intermediate
8692: Note:
8693: This allows one to use either the restriction or interpolation (its transpose)
8694: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8695: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
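   For example (a minimal sketch; `A`, `X`, and `W` are assumed to be assembled elsewhere with compatible sizes)
.vb
   Mat Y = NULL;                        // must start as NULL
   MatMatInterpolateAdd(A, X, W, &Y);   // first call creates Y
   MatMatInterpolateAdd(A, X, W, &Y);   // later calls reuse Y when the sizes match
   MatDestroy(&Y);
.ve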
8697: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8698: @*/
8699: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8700: {
8701: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8702: PetscBool trans = PETSC_TRUE;
8703: MatReuse reuse = MAT_INITIAL_MATRIX;
8705: PetscFunctionBegin;
8711: PetscCall(MatGetSize(A, &M, &N));
8712: PetscCall(MatGetSize(x, &Mx, &Nx));
8713: if (N == Mx) trans = PETSC_FALSE;
8714: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8715: Mo = trans ? N : M;
8716: if (*y) {
8717: PetscCall(MatGetSize(*y, &My, &Ny));
8718: if (Mo == My && Nx == Ny) {
8719: reuse = MAT_REUSE_MATRIX;
8720: } else {
8721: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8722: PetscCall(MatDestroy(y));
8723: }
8724: }
8726: if (w && *y == w) { /* this is to minimize changes in PCMG */
8727: PetscBool flg;
8729: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8730: if (w) {
8731: PetscInt My, Ny, Mw, Nw;
8733: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8734: PetscCall(MatGetSize(*y, &My, &Ny));
8735: PetscCall(MatGetSize(w, &Mw, &Nw));
8736: if (!flg || My != Mw || Ny != Nw) w = NULL;
8737: }
8738: if (!w) {
8739: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8740: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8741: PetscCall(PetscObjectDereference((PetscObject)w));
8742: } else {
8743: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8744: }
8745: }
8746: if (!trans) {
8747: PetscCall(MatMatMult(A, x, reuse, PETSC_DETERMINE, y));
8748: } else {
8749: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DETERMINE, y));
8750: }
8751: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8752: PetscFunctionReturn(PETSC_SUCCESS);
8753: }
8755: /*@
8756: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8758: Neighbor-wise Collective
8760: Input Parameters:
8761: + A - the matrix
8762: - x - the input dense matrix
8764: Output Parameter:
8765: . y - the output dense matrix
8767: Level: intermediate
8769: Note:
8770: This allows one to use either the restriction or interpolation (its transpose)
8771: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8772: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8774: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8775: @*/
8776: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8777: {
8778: PetscFunctionBegin;
8779: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8780: PetscFunctionReturn(PETSC_SUCCESS);
8781: }
8783: /*@
8784: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8786: Neighbor-wise Collective
8788: Input Parameters:
8789: + A - the matrix
8790: - x - the input dense matrix
8792: Output Parameter:
8793: . y - the output dense matrix
8795: Level: intermediate
8797: Note:
8798: This allows one to use either the restriction or interpolation (its transpose)
8799: matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
8800: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8802: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8803: @*/
8804: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
8805: {
8806: PetscFunctionBegin;
8807: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8808: PetscFunctionReturn(PETSC_SUCCESS);
8809: }
8811: /*@
8812: MatGetNullSpace - retrieves the null space of a matrix.
8814: Logically Collective
8816: Input Parameters:
8817: + mat - the matrix
8818: - nullsp - the null space object
8820: Level: developer
8822: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8823: @*/
8824: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8825: {
8826: PetscFunctionBegin;
8828: PetscAssertPointer(nullsp, 2);
8829: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8830: PetscFunctionReturn(PETSC_SUCCESS);
8831: }
8833: /*@C
8834: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
8836: Logically Collective
8838: Input Parameters:
8839: + n - the number of matrices
8840: - mat - the array of matrices
8842: Output Parameter:
8843: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space, length 3 * `n`
8845: Level: developer
8847: Note:
8848:    Call `MatRestoreNullSpaces()` to provide these to another array of matrices
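   For example, to preserve the (near) null spaces across a rebuild of a set of matrices (a minimal sketch; `n` and the array `mats` are assumed to exist)
.vb
   MatNullSpace *nsps;
   MatGetNullSpaces(n, mats, &nsps);
   // ... destroy and recreate the matrices in mats ...
   MatRestoreNullSpaces(n, mats, &nsps);
.ve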
8850: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8851: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
8852: @*/
8853: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8854: {
8855: PetscFunctionBegin;
8856: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8857: PetscAssertPointer(mat, 2);
8858: PetscAssertPointer(nullsp, 3);
8860: PetscCall(PetscCalloc1(3 * n, nullsp));
8861: for (PetscInt i = 0; i < n; i++) {
8863: (*nullsp)[i] = mat[i]->nullsp;
8864: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
8865: (*nullsp)[n + i] = mat[i]->nearnullsp;
8866: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
8867: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
8868: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
8869: }
8870: PetscFunctionReturn(PETSC_SUCCESS);
8871: }
8873: /*@C
8874: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
8876: Logically Collective
8878: Input Parameters:
8879: + n - the number of matrices
8880: . mat - the array of matrices
8881: - nullsp - an array of null spaces
8883: Level: developer
8885: Note:
8886: Call `MatGetNullSpaces()` to create `nullsp`
8888: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8889: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
8890: @*/
8891: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8892: {
8893: PetscFunctionBegin;
8894: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8895: PetscAssertPointer(mat, 2);
8896: PetscAssertPointer(nullsp, 3);
8897: PetscAssertPointer(*nullsp, 3);
8899: for (PetscInt i = 0; i < n; i++) {
8901: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
8902: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
8903: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
8904: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
8905: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
8906: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
8907: }
8908: PetscCall(PetscFree(*nullsp));
8909: PetscFunctionReturn(PETSC_SUCCESS);
8910: }
8912: /*@
8913: MatSetNullSpace - attaches a null space to a matrix.
8915: Logically Collective
8917: Input Parameters:
8918: + mat - the matrix
8919: - nullsp - the null space object
8921: Level: advanced
8923: Notes:
8924: This null space is used by the `KSP` linear solvers to solve singular systems.
8926:    Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with a `nullsp` of `NULL`
8928: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
8929: to zero but the linear system will still be solved in a least squares sense.
8931: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
8932:    the domain of a matrix $A$ (from $R^n$ to $R^m$, with $m$ rows and $n$ columns) satisfies $R^n = n(A) \oplus R(A^T)$, the direct sum of the null space of $A$, $n(A)$, and the range of $A^T$, $R(A^T)$.
8933:    Similarly $R^m = n(A^T) \oplus R(A)$. Hence the linear system $A x = b$ has a solution only if $b \in R(A)$ (or, equivalently, $b$ is orthogonal to
8934:    $n(A^T)$), and if $x$ is a solution then $x + \alpha n(A)$ is a solution for any $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution,
8935:    the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$ where $\hat{b}$ is $b$ orthogonalized against $n(A^T)$.
8936: This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
8938: If the matrix is known to be symmetric because it is an `MATSBAIJ` matrix or one has called
8939: `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`); this
8940: routine also automatically calls `MatSetTransposeNullSpace()`.
8942: The user should call `MatNullSpaceDestroy()`.
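   For example, for a singular system whose null space consists of the constant vector (a minimal sketch; the `KSP` setup and solve are elided)
.vb
   MatNullSpace nullsp;
   MatNullSpaceCreate(PetscObjectComm((PetscObject)mat), PETSC_TRUE, 0, NULL, &nullsp);
   MatSetNullSpace(mat, nullsp);
   MatNullSpaceDestroy(&nullsp);
.ve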
8944: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
8945: `KSPSetPCSide()`
8946: @*/
8947: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
8948: {
8949: PetscFunctionBegin;
8952: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
8953: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
8954: mat->nullsp = nullsp;
8955: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
8956: PetscFunctionReturn(PETSC_SUCCESS);
8957: }
8959: /*@
8960: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
8962: Logically Collective
8964: Input Parameters:
8965: + mat - the matrix
8966: - nullsp - the null space object
8968: Level: developer
8970: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
8971: @*/
8972: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
8973: {
8974: PetscFunctionBegin;
8977: PetscAssertPointer(nullsp, 2);
8978: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
8979: PetscFunctionReturn(PETSC_SUCCESS);
8980: }
8982: /*@
8983: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
8985: Logically Collective
8987: Input Parameters:
8988: + mat - the matrix
8989: - nullsp - the null space object
8991: Level: advanced
8993: Notes:
8994: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
8996: See `MatSetNullSpace()`
8998: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
8999: @*/
9000: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
9001: {
9002: PetscFunctionBegin;
9005: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9006: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
9007: mat->transnullsp = nullsp;
9008: PetscFunctionReturn(PETSC_SUCCESS);
9009: }
9011: /*@
9012:    MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions.
9013: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9015: Logically Collective
9017: Input Parameters:
9018: + mat - the matrix
9019: - nullsp - the null space object
9021: Level: advanced
9023: Notes:
9024: Overwrites any previous near null space that may have been attached
9026: You can remove the null space by calling this routine with an `nullsp` of `NULL`
9028: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9029: @*/
9030: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
9031: {
9032: PetscFunctionBegin;
9036: MatCheckPreallocated(mat, 1);
9037: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9038: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
9039: mat->nearnullsp = nullsp;
9040: PetscFunctionReturn(PETSC_SUCCESS);
9041: }
9043: /*@
9044: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9046: Not Collective
9048: Input Parameter:
9049: . mat - the matrix
9051: Output Parameter:
9052: . nullsp - the null space object, `NULL` if not set
9054: Level: advanced
9056: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9057: @*/
9058: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
9059: {
9060: PetscFunctionBegin;
9063: PetscAssertPointer(nullsp, 2);
9064: MatCheckPreallocated(mat, 1);
9065: *nullsp = mat->nearnullsp;
9066: PetscFunctionReturn(PETSC_SUCCESS);
9067: }
9069: /*@
9070: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9072: Collective
9074: Input Parameters:
9075: + mat - the matrix
9076: . row - row/column permutation
9077: - info - information on desired factorization process
9079: Level: developer
9081: Notes:
9082: Probably really in-place only when level of fill is zero, otherwise allocates
9083: new space to store factored matrix and deletes previous memory.
9085: Most users should employ the `KSP` interface for linear solvers
9086: instead of working directly with matrix algebra routines such as this.
9087: See, e.g., `KSPCreate()`.
9089: Fortran Note:
9090: A valid (non-null) `info` argument must be provided
9092: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9093: @*/
9094: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9095: {
9096: PetscFunctionBegin;
9100: PetscAssertPointer(info, 3);
9101: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9102: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9103: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9104: MatCheckPreallocated(mat, 1);
9105: PetscUseTypeMethod(mat, iccfactor, row, info);
9106: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9107: PetscFunctionReturn(PETSC_SUCCESS);
9108: }
9110: /*@
9111: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9112: ghosted ones.
9114: Not Collective
9116: Input Parameters:
9117: + mat - the matrix
9118: - diag - the diagonal values, including ghost ones
9120: Level: developer
9122: Notes:
9123: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9125:    This allows one to avoid the communication that `MatDiagonalScale()` would require to perform the scaling, since the ghosted values are supplied directly
9127: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9128: @*/
9129: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9130: {
9131: PetscMPIInt size;
9133: PetscFunctionBegin;
9138: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9139: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9140: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9141: if (size == 1) {
9142: PetscInt n, m;
9143: PetscCall(VecGetSize(diag, &n));
9144: PetscCall(MatGetSize(mat, NULL, &m));
9145: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9146: PetscCall(MatDiagonalScale(mat, NULL, diag));
9147: } else {
9148: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9149: }
9150: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9151: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9152: PetscFunctionReturn(PETSC_SUCCESS);
9153: }
9155: /*@
9156: MatGetInertia - Gets the inertia from a factored matrix
9158: Collective
9160: Input Parameter:
9161: . mat - the matrix
9163: Output Parameters:
9164: + nneg - number of negative eigenvalues
9165: . nzero - number of zero eigenvalues
9166: - npos - number of positive eigenvalues
9168: Level: advanced
9170: Note:
9171: Matrix must have been factored by `MatCholeskyFactor()`
9173: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9174: @*/
9175: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9176: {
9177: PetscFunctionBegin;
9180: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9181: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9182: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9183: PetscFunctionReturn(PETSC_SUCCESS);
9184: }
9186: /*@C
9187: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9189: Neighbor-wise Collective
9191: Input Parameters:
9192: + mat - the factored matrix obtained with `MatGetFactor()`
9193: - b - the right-hand-side vectors
9195: Output Parameter:
9196: . x - the result vectors
9198: Level: developer
9200: Note:
9201: The vectors `b` and `x` cannot be the same. I.e., one cannot
9202: call `MatSolves`(A,x,x).
9204: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9205: @*/
9206: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9207: {
9208: PetscFunctionBegin;
9211: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9212: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9213: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9215: MatCheckPreallocated(mat, 1);
9216: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9217: PetscUseTypeMethod(mat, solves, b, x);
9218: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9219: PetscFunctionReturn(PETSC_SUCCESS);
9220: }
9222: /*@
9223: MatIsSymmetric - Test whether a matrix is symmetric
9225: Collective
9227: Input Parameters:
9228: + A - the matrix to test
9229: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9231: Output Parameter:
9232: . flg - the result
9234: Level: intermediate
9236: Notes:
9237: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9239: If the matrix does not yet know if it is symmetric or not this can be an expensive operation, also available `MatIsSymmetricKnown()`
9241: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9242:    after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9244: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9245: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9246: @*/
9247: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9248: {
9249: PetscFunctionBegin;
9251: PetscAssertPointer(flg, 3);
9252: if (A->symmetric != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->symmetric);
9253: else {
9254: if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9255: else PetscCall(MatIsTranspose(A, A, tol, flg));
9256: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9257: }
9258: PetscFunctionReturn(PETSC_SUCCESS);
9259: }
9261: /*@
9262: MatIsHermitian - Test whether a matrix is Hermitian
9264: Collective
9266: Input Parameters:
9267: + A - the matrix to test
9268: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9270: Output Parameter:
9271: . flg - the result
9273: Level: intermediate
9275: Notes:
9276: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9278: If the matrix does not yet know if it is Hermitian or not this can be an expensive operation, also available `MatIsHermitianKnown()`
9280: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9281:    after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9283: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9284: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9285: @*/
9286: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9287: {
9288: PetscFunctionBegin;
9290: PetscAssertPointer(flg, 3);
9291: if (A->hermitian != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->hermitian);
9292: else {
9293: if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9294: else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9295: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9296: }
9297: PetscFunctionReturn(PETSC_SUCCESS);
9298: }
9300: /*@
9301: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9303: Not Collective
9305: Input Parameter:
9306: . A - the matrix to check
9308: Output Parameters:
9309: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9310: - flg - the result (only valid if set is `PETSC_TRUE`)
9312: Level: advanced
9314: Notes:
9315: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9316: if you want it explicitly checked
9318: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9319:    after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9321: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9322: @*/
9323: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9324: {
9325: PetscFunctionBegin;
9327: PetscAssertPointer(set, 2);
9328: PetscAssertPointer(flg, 3);
9329: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9330: *set = PETSC_TRUE;
9331: *flg = PetscBool3ToBool(A->symmetric);
9332: } else {
9333: *set = PETSC_FALSE;
9334: }
9335: PetscFunctionReturn(PETSC_SUCCESS);
9336: }
9338: /*@
9339: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9341: Not Collective
9343: Input Parameter:
9344: . A - the matrix to check
9346: Output Parameters:
9347: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9348: - flg - the result (only valid if set is `PETSC_TRUE`)
9350: Level: advanced
9352: Notes:
9353: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9355: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9356:    after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9358: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9359: @*/
9360: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9361: {
9362: PetscFunctionBegin;
9364: PetscAssertPointer(set, 2);
9365: PetscAssertPointer(flg, 3);
9366: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9367: *set = PETSC_TRUE;
9368: *flg = PetscBool3ToBool(A->spd);
9369: } else {
9370: *set = PETSC_FALSE;
9371: }
9372: PetscFunctionReturn(PETSC_SUCCESS);
9373: }
9375: /*@
9376: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9378: Not Collective
9380: Input Parameter:
9381: . A - the matrix to check
9383: Output Parameters:
9384: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9385: - flg - the result (only valid if set is `PETSC_TRUE`)
9387: Level: advanced
9389: Notes:
9390: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9391: if you want it explicitly checked
9393: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9394:    after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9396: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9397: @*/
9398: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9399: {
9400: PetscFunctionBegin;
9402: PetscAssertPointer(set, 2);
9403: PetscAssertPointer(flg, 3);
9404: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9405: *set = PETSC_TRUE;
9406: *flg = PetscBool3ToBool(A->hermitian);
9407: } else {
9408: *set = PETSC_FALSE;
9409: }
9410: PetscFunctionReturn(PETSC_SUCCESS);
9411: }
9413: /*@
9414: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9416: Collective
9418: Input Parameter:
9419: . A - the matrix to test
9421: Output Parameter:
9422: . flg - the result
9424: Level: intermediate
9426: Notes:
9427:    If the matrix does not yet know if it is structurally symmetric this can be an expensive operation, also available `MatIsStructurallySymmetricKnown()`
9429: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9430:    symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9432: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9433: @*/
9434: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9435: {
9436: PetscFunctionBegin;
9438: PetscAssertPointer(flg, 2);
9439: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9440: *flg = PetscBool3ToBool(A->structurally_symmetric);
9441: } else {
9442: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9443: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9444: }
9445: PetscFunctionReturn(PETSC_SUCCESS);
9446: }
9448: /*@
9449: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9451: Not Collective
9453: Input Parameter:
9454: . A - the matrix to check
9456: Output Parameters:
9457: + set - `PETSC_TRUE` if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9458: - flg - the result (only valid if set is `PETSC_TRUE`)
9460: Level: advanced
9462: Notes:
9463: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9464:    symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9466: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9468: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9469: @*/
9470: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9471: {
9472: PetscFunctionBegin;
9474: PetscAssertPointer(set, 2);
9475: PetscAssertPointer(flg, 3);
9476: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9477: *set = PETSC_TRUE;
9478: *flg = PetscBool3ToBool(A->structurally_symmetric);
9479: } else {
9480: *set = PETSC_FALSE;
9481: }
9482: PetscFunctionReturn(PETSC_SUCCESS);
9483: }
9485: /*@
9486: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9487: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9489: Not Collective
9491: Input Parameter:
9492: . mat - the matrix
9494: Output Parameters:
9495: + nstash - the size of the stash
9496: . reallocs - the number of additional mallocs incurred.
9497: . bnstash - the size of the block stash
9498: - breallocs - the number of additional mallocs incurred in the block stash
9500: Level: advanced
9502: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9503: @*/
9504: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9505: {
9506: PetscFunctionBegin;
9507: PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9508: PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9509: PetscFunctionReturn(PETSC_SUCCESS);
9510: }
9512: /*@
9513: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9514: parallel layout, `PetscLayout` for rows and columns
9516: Collective
9518: Input Parameter:
9519: . mat - the matrix
9521: Output Parameters:
9522: + right - (optional) vector that the matrix can be multiplied against
9523: - left - (optional) vector that the matrix vector product can be stored in
9525: Level: advanced
9527: Notes:
9528: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9530:    These are new vectors which are not owned by the `mat`; they should be destroyed with `VecDestroy()` when no longer needed
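   For example (a minimal sketch; `mat` is assumed to be assembled elsewhere)
.vb
   Vec right, left;
   MatCreateVecs(mat, &right, &left);
   VecSet(right, 1.0);
   MatMult(mat, right, left);   // right uses the column layout, left the row layout
   VecDestroy(&right);
   VecDestroy(&left);
.ve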
9532: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9533: @*/
9534: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9535: {
9536: PetscFunctionBegin;
9539: if (mat->ops->getvecs) {
9540: PetscUseTypeMethod(mat, getvecs, right, left);
9541: } else {
9542: if (right) {
9543: PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9544: PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9545: PetscCall(VecSetType(*right, mat->defaultvectype));
9546: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9547: if (mat->boundtocpu && mat->bindingpropagates) {
9548: PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9549: PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9550: }
9551: #endif
9552: }
9553: if (left) {
9554: PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9555: PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9556: PetscCall(VecSetType(*left, mat->defaultvectype));
9557: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9558: if (mat->boundtocpu && mat->bindingpropagates) {
9559: PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9560: PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9561: }
9562: #endif
9563: }
9564: }
9565: PetscFunctionReturn(PETSC_SUCCESS);
9566: }
9568: /*@
9569: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9570: with default values.
9572: Not Collective
9574: Input Parameter:
9575: . info - the `MatFactorInfo` data structure
9577: Level: developer
9579: Notes:
9580: The solvers are generally used through the `KSP` and `PC` objects, for example
9581: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9583: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
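   For example (a minimal sketch; `mat`, `isrow`, and `iscol` are assumed to be set up elsewhere)
.vb
   MatFactorInfo info;
   MatFactorInfoInitialize(&info);
   info.fill = 2.0;                       // expected fill ratio
   MatLUFactor(mat, isrow, iscol, &info);
.ve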
9585: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9586: @*/
9587: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9588: {
9589: PetscFunctionBegin;
9590: PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9591: PetscFunctionReturn(PETSC_SUCCESS);
9592: }
9594: /*@
9595: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9597: Collective
9599: Input Parameters:
9600: + mat - the factored matrix
9601: - is - the index set defining the Schur indices (0-based)
9603: Level: advanced
9605: Notes:
9606: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9608: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9610: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
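   A typical calling sequence follows (a minimal sketch, assuming a MUMPS LU factorization; declarations, the `MatFactorInfo` setup, and error checking are elided)
.vb
   MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F);
   MatFactorSetSchurIS(F, is);
   MatLUFactorSymbolic(F, A, NULL, NULL, &info);
   MatLUFactorNumeric(F, A, &info);
   MatFactorSolveSchurComplement(F, rhs, sol);
.ve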
9612: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9613: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9614: @*/
9615: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9616: {
9617: PetscErrorCode (*f)(Mat, IS);
9619: PetscFunctionBegin;
9624: PetscCheckSameComm(mat, 1, is, 2);
9625: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9626: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9627: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9628: PetscCall(MatDestroy(&mat->schur));
9629: PetscCall((*f)(mat, is));
9630: PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9631: PetscFunctionReturn(PETSC_SUCCESS);
9632: }
9634: /*@
9635: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9637: Logically Collective
9639: Input Parameters:
9640: + F - the factored matrix obtained by calling `MatGetFactor()`
9641: . S - location where to return the Schur complement, can be `NULL`
9642: - status - the status of the Schur complement matrix, can be `NULL`
9644: Level: advanced
9646: Notes:
9647: You must call `MatFactorSetSchurIS()` before calling this routine.
9649: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9651: The routine provides a copy of the Schur matrix stored within the solver data structures.
9652: The caller must destroy the object when it is no longer needed.
9653: If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.
9655: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9657: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9659: Developer Note:
9660: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9661: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9663: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9664: @*/
9665: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9666: {
9667: PetscFunctionBegin;
9669: if (S) PetscAssertPointer(S, 2);
9670: if (status) PetscAssertPointer(status, 3);
9671: if (S) {
9672: PetscErrorCode (*f)(Mat, Mat *);
9674: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9675: if (f) {
9676: PetscCall((*f)(F, S));
9677: } else {
9678: PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9679: }
9680: }
9681: if (status) *status = F->schur_status;
9682: PetscFunctionReturn(PETSC_SUCCESS);
9683: }
9685: /*@
9686: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9688: Logically Collective
9690: Input Parameters:
9691: + F - the factored matrix obtained by calling `MatGetFactor()`
9692: . S - location where to return the Schur complement, can be `NULL`
9693: - status - the status of the Schur complement matrix, can be `NULL`
9695: Level: advanced
9697: Notes:
9698: You must call `MatFactorSetSchurIS()` before calling this routine.
9700: Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`
9702:    The routine returns the Schur complement stored within the data structures of the solver.
9704: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9706: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9708: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9710: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9712: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9713: @*/
9714: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9715: {
9716: PetscFunctionBegin;
9718: if (S) {
9719: PetscAssertPointer(S, 2);
9720: *S = F->schur;
9721: }
9722: if (status) {
9723: PetscAssertPointer(status, 3);
9724: *status = F->schur_status;
9725: }
9726: PetscFunctionReturn(PETSC_SUCCESS);
9727: }
9729: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9730: {
9731: Mat S = F->schur;
9733: PetscFunctionBegin;
9734: switch (F->schur_status) {
9735: case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9736: case MAT_FACTOR_SCHUR_INVERTED:
9737: if (S) {
9738: S->ops->solve = NULL;
9739: S->ops->matsolve = NULL;
9740: S->ops->solvetranspose = NULL;
9741: S->ops->matsolvetranspose = NULL;
9742: S->ops->solveadd = NULL;
9743: S->ops->solvetransposeadd = NULL;
9744: S->factortype = MAT_FACTOR_NONE;
9745: PetscCall(PetscFree(S->solvertype));
9746: }
9747: case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9748: break;
9749: default:
9750: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9751: }
9752: PetscFunctionReturn(PETSC_SUCCESS);
9753: }
9755: /*@
9756: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9758: Logically Collective
9760: Input Parameters:
9761: + F - the factored matrix obtained by calling `MatGetFactor()`
9762: . S - location where the Schur complement is stored
9763: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9765: Level: advanced
9767: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9768: @*/
9769: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9770: {
9771: PetscFunctionBegin;
9773: if (S) {
9775: *S = NULL;
9776: }
9777: F->schur_status = status;
9778: PetscCall(MatFactorUpdateSchurStatus_Private(F));
9779: PetscFunctionReturn(PETSC_SUCCESS);
9780: }
9782: /*@
9783: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9785: Logically Collective
9787: Input Parameters:
9788: + F - the factored matrix obtained by calling `MatGetFactor()`
9789: . rhs - location where the right-hand side of the Schur complement system is stored
9790: - sol - location where the solution of the Schur complement system has to be returned
9792: Level: advanced
9794: Notes:
9795: The sizes of the vectors should match the size of the Schur complement
9797: Must be called after `MatFactorSetSchurIS()`
9799: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9800: @*/
9801: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9802: {
9803: PetscFunctionBegin;
9810: PetscCheckSameComm(F, 1, rhs, 2);
9811: PetscCheckSameComm(F, 1, sol, 3);
9812: PetscCall(MatFactorFactorizeSchurComplement(F));
9813: switch (F->schur_status) {
9814: case MAT_FACTOR_SCHUR_FACTORED:
9815: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
9816: break;
9817: case MAT_FACTOR_SCHUR_INVERTED:
9818: PetscCall(MatMultTranspose(F->schur, rhs, sol));
9819: break;
9820: default:
9821: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9822: }
9823: PetscFunctionReturn(PETSC_SUCCESS);
9824: }
9826: /*@
9827: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9829: Logically Collective
9831: Input Parameters:
9832: + F - the factored matrix obtained by calling `MatGetFactor()`
9833: . rhs - location where the right-hand side of the Schur complement system is stored
9834: - sol - location where the solution of the Schur complement system has to be returned
9836: Level: advanced
9838: Notes:
9839: The sizes of the vectors should match the size of the Schur complement
9841: Must be called after `MatFactorSetSchurIS()`
9843: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
9844: @*/
9845: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
9846: {
9847: PetscFunctionBegin;
9854: PetscCheckSameComm(F, 1, rhs, 2);
9855: PetscCheckSameComm(F, 1, sol, 3);
9856: PetscCall(MatFactorFactorizeSchurComplement(F));
9857: switch (F->schur_status) {
9858: case MAT_FACTOR_SCHUR_FACTORED:
9859: PetscCall(MatSolve(F->schur, rhs, sol));
9860: break;
9861: case MAT_FACTOR_SCHUR_INVERTED:
9862: PetscCall(MatMult(F->schur, rhs, sol));
9863: break;
9864: default:
9865: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9866: }
9867: PetscFunctionReturn(PETSC_SUCCESS);
9868: }
9870: PETSC_EXTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
9871: #if PetscDefined(HAVE_CUDA)
9872: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
9873: #endif
9875: /* Schur status updated in the interface */
9876: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
9877: {
9878: Mat S = F->schur;
9880: PetscFunctionBegin;
9881: if (S) {
9882: PetscMPIInt size;
9883: PetscBool isdense, isdensecuda;
9885: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
9886: PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
9887: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
9888: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
9889: PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
9890: PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
9891: if (isdense) {
9892: PetscCall(MatSeqDenseInvertFactors_Private(S));
9893: } else if (isdensecuda) {
9894: #if defined(PETSC_HAVE_CUDA)
9895: PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
9896: #endif
9897: }
9898:     // TODO: add a HIP (MATSEQDENSEHIP) code path analogous to the CUDA branch above
9899: PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
9900: }
9901: PetscFunctionReturn(PETSC_SUCCESS);
9902: }
9904: /*@
9905: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
9907: Logically Collective
9909: Input Parameter:
9910: . F - the factored matrix obtained by calling `MatGetFactor()`
9912: Level: advanced
9914: Notes:
9915: Must be called after `MatFactorSetSchurIS()`.
9917: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
9919: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
9920: @*/
9921: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
9922: {
9923: PetscFunctionBegin;
9926: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
9927: PetscCall(MatFactorFactorizeSchurComplement(F));
9928: PetscCall(MatFactorInvertSchurComplement_Private(F));
9929: F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
9930: PetscFunctionReturn(PETSC_SUCCESS);
9931: }
9933: /*@
9934: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
9936: Logically Collective
9938: Input Parameter:
9939: . F - the factored matrix obtained by calling `MatGetFactor()`
9941: Level: advanced
9943: Note:
9944: Must be called after `MatFactorSetSchurIS()`
9946: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
9947: @*/
9948: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
9949: {
9950: MatFactorInfo info;
9952: PetscFunctionBegin;
9955: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
9956: PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
9957: PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
9958: if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
9959: PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
9960: } else {
9961: PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
9962: }
9963: PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
9964: F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
9965: PetscFunctionReturn(PETSC_SUCCESS);
9966: }
9968: /*@
9969: MatPtAP - Creates the matrix product $C = P^T * A * P$
9971: Neighbor-wise Collective
9973: Input Parameters:
9974: + A - the matrix
9975: . P - the projection matrix
9976: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
9977: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
9978: if the result is a dense matrix this is irrelevant
9980: Output Parameter:
9981: . C - the product matrix
9983: Level: intermediate
9985: Notes:
9986: C will be created and must be destroyed by the user with `MatDestroy()`.
9988: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
9990: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
9992: Developer Note:
9993: For matrix types without a specialized implementation the function falls back to `MatMatMult()` followed by `MatTransposeMatMult()`.
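Example Usage:
A minimal sketch, assuming `A` and `P` are assembled matrices of compatible sizes
.vb
   Mat C;

   PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); /* C = P^T * A * P */
   /* ... the values of A change, but its nonzero pattern does not ... */
   PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, PETSC_DETERMINE, &C));   /* recompute C in place */
   PetscCall(MatDestroy(&C));
.ve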
9995: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
9996: @*/
9997: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
9998: {
9999: PetscFunctionBegin;
10000: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10001: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10003: if (scall == MAT_INITIAL_MATRIX) {
10004: PetscCall(MatProductCreate(A, P, NULL, C));
10005: PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
10006: PetscCall(MatProductSetAlgorithm(*C, "default"));
10007: PetscCall(MatProductSetFill(*C, fill));
10009: (*C)->product->api_user = PETSC_TRUE;
10010: PetscCall(MatProductSetFromOptions(*C));
10011: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
10012: PetscCall(MatProductSymbolic(*C));
10013: } else { /* scall == MAT_REUSE_MATRIX */
10014: PetscCall(MatProductReplaceMats(A, P, NULL, *C));
10015: }
10017: PetscCall(MatProductNumeric(*C));
10018: (*C)->symmetric = A->symmetric;
10019: (*C)->spd = A->spd;
10020: PetscFunctionReturn(PETSC_SUCCESS);
10021: }
10023: /*@
10024: MatRARt - Creates the matrix product $C = R * A * R^T$
10026: Neighbor-wise Collective
10028: Input Parameters:
10029: + A - the matrix
10030: . R - the projection matrix
10031: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10032: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10033: if the result is a dense matrix this is irrelevant
10035: Output Parameter:
10036: . C - the product matrix
10038: Level: intermediate
10040: Notes:
10041: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10043: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10045: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10046: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10047: the parallel `MatRARt()` is implemented by computing the explicit transpose of `R`, which can be very expensive.
10048: We recommend using `MatPtAP()` when possible.
10050: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
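Example Usage:
A minimal sketch, assuming `A` and `R` are assembled `MATAIJ` matrices of compatible sizes
.vb
   Mat C;

   PetscCall(MatRARt(A, R, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); /* C = R * A * R^T */
   PetscCall(MatDestroy(&C));
.ve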
10052: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10053: @*/
10054: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10055: {
10056: PetscFunctionBegin;
10057: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10058: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10060: if (scall == MAT_INITIAL_MATRIX) {
10061: PetscCall(MatProductCreate(A, R, NULL, C));
10062: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10063: PetscCall(MatProductSetAlgorithm(*C, "default"));
10064: PetscCall(MatProductSetFill(*C, fill));
10066: (*C)->product->api_user = PETSC_TRUE;
10067: PetscCall(MatProductSetFromOptions(*C));
10068: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10069: PetscCall(MatProductSymbolic(*C));
10070: } else { /* scall == MAT_REUSE_MATRIX */
10071: PetscCall(MatProductReplaceMats(A, R, NULL, *C));
10072: }
10074: PetscCall(MatProductNumeric(*C));
10075: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10076: PetscFunctionReturn(PETSC_SUCCESS);
10077: }
10079: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10080: {
10081: PetscBool flg = PETSC_TRUE;
10083: PetscFunctionBegin;
10084: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10085: if (scall == MAT_INITIAL_MATRIX) {
10086: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10087: PetscCall(MatProductCreate(A, B, NULL, C));
10088: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10089: PetscCall(MatProductSetFill(*C, fill));
10090: } else { /* scall == MAT_REUSE_MATRIX */
10091: Mat_Product *product = (*C)->product;
10093: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
10094: if (flg && product && product->type != ptype) {
10095: PetscCall(MatProductClear(*C));
10096: product = NULL;
10097: }
10098: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10099: if (!product) { /* the user provided the dense matrix *C without calling MatProductCreate() or reusing it from a previous call */
10100: PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10101: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10102: product = (*C)->product;
10103: product->fill = fill;
10104: product->clear = PETSC_TRUE;
10105: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10106: flg = PETSC_FALSE;
10107: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10108: }
10109: }
10110: if (flg) {
10111: (*C)->product->api_user = PETSC_TRUE;
10112: PetscCall(MatProductSetType(*C, ptype));
10113: PetscCall(MatProductSetFromOptions(*C));
10114: PetscCall(MatProductSymbolic(*C));
10115: }
10116: PetscCall(MatProductNumeric(*C));
10117: PetscFunctionReturn(PETSC_SUCCESS);
10118: }
10120: /*@
10121: MatMatMult - Performs matrix-matrix multiplication $C = A*B$.
10123: Neighbor-wise Collective
10125: Input Parameters:
10126: + A - the left matrix
10127: . B - the right matrix
10128: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10129: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10130: if the result is a dense matrix this is irrelevant
10132: Output Parameter:
10133: . C - the product matrix
10135: Notes:
10136: Unless `scall` is `MAT_REUSE_MATRIX`, `C` will be created.
10138: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10139: call to this function with `MAT_INITIAL_MATRIX`.
10141: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value actually needed.
10143: In the special case where matrix `B` (and hence `C`) are dense you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10144: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10146: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
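Example Usage (direct call):
A minimal sketch, assuming `A` and `B` are assembled matrices of compatible sizes
.vb
   Mat C;

   PetscCall(MatMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); /* C = A * B */
   /* ... the values of A or B change, but not their nonzero patterns ... */
   PetscCall(MatMatMult(A, B, MAT_REUSE_MATRIX, PETSC_DETERMINE, &C));
   PetscCall(MatDestroy(&C));
.ve
The equivalent sequence written with the `MatProduct` API is shown next.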
10148: Example of Usage:
10149: .vb
10150: MatProductCreate(A,B,NULL,&C);
10151: MatProductSetType(C,MATPRODUCT_AB);
MatProductSetFromOptions(C);
10152: MatProductSymbolic(C);
10153: MatProductNumeric(C); // compute C=A * B
10154: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10155: MatProductNumeric(C);
10156: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10157: MatProductNumeric(C);
10158: .ve
10160: Level: intermediate
10162: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10163: @*/
10164: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10165: {
10166: PetscFunctionBegin;
10167: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10168: PetscFunctionReturn(PETSC_SUCCESS);
10169: }
10171: /*@
10172: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10174: Neighbor-wise Collective
10176: Input Parameters:
10177: + A - the left matrix
10178: . B - the right matrix
10179: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10180: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10182: Output Parameter:
10183: . C - the product matrix
10185: Options Database Key:
10186: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10187: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10188: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10190: Level: intermediate
10192: Notes:
10193: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10195: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10197: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10198: actually needed.
10200: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10201: and for pairs of `MATMPIDENSE` matrices.
10203: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`
10205: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10207: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10208: @*/
10209: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10210: {
10211: PetscFunctionBegin;
10212: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10213: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10214: PetscFunctionReturn(PETSC_SUCCESS);
10215: }
10217: /*@
10218: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10220: Neighbor-wise Collective
10222: Input Parameters:
10223: + A - the left matrix
10224: . B - the right matrix
10225: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10226: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10228: Output Parameter:
10229: . C - the product matrix
10231: Level: intermediate
10233: Notes:
10234: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10236: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
10238: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`
10240: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10241: actually needed.
10243: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10244: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10246: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
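Example Usage:
A minimal sketch, assuming `A` and `B` are assembled matrices with the same row layout
.vb
   Mat AtB;

   PetscCall(MatTransposeMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &AtB)); /* AtB = A^T * B */
   PetscCall(MatDestroy(&AtB));
.ve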
10248: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10249: @*/
10250: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10251: {
10252: PetscFunctionBegin;
10253: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10254: PetscFunctionReturn(PETSC_SUCCESS);
10255: }
10257: /*@
10258: MatMatMatMult - Performs matrix-matrix-matrix multiplication $D = A*B*C$.
10260: Neighbor-wise Collective
10262: Input Parameters:
10263: + A - the left matrix
10264: . B - the middle matrix
10265: . C - the right matrix
10266: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10267: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B) + nnz(C)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10268: if the result is a dense matrix this is irrelevant
10270: Output Parameter:
10271: . D - the product matrix
10273: Level: intermediate
10275: Notes:
10276: Unless `scall` is `MAT_REUSE_MATRIX` `D` will be created.
10278: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10280: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`
10282: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10283: actually needed.
10285: If you have many matrices with the same non-zero structure to multiply, you
10286: should use `MAT_REUSE_MATRIX` in all calls but the first
10288: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
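Example Usage:
A minimal sketch, assuming `R`, `A`, and `P` are assembled matrices with compatible sizes, for instance a triple product where `R` is not the transpose of `P`
.vb
   Mat RAP;

   PetscCall(MatMatMatMult(R, A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &RAP)); /* RAP = R * A * P */
   PetscCall(MatDestroy(&RAP));
.ve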
10290: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10291: @*/
10292: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10293: {
10294: PetscFunctionBegin;
10295: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10296: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10298: if (scall == MAT_INITIAL_MATRIX) {
10299: PetscCall(MatProductCreate(A, B, C, D));
10300: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10301: PetscCall(MatProductSetAlgorithm(*D, "default"));
10302: PetscCall(MatProductSetFill(*D, fill));
10304: (*D)->product->api_user = PETSC_TRUE;
10305: PetscCall(MatProductSetFromOptions(*D));
10306: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10307: ((PetscObject)C)->type_name);
10308: PetscCall(MatProductSymbolic(*D));
10309: } else { /* user may change input matrices when REUSE */
10310: PetscCall(MatProductReplaceMats(A, B, C, *D));
10311: }
10312: PetscCall(MatProductNumeric(*D));
10313: PetscFunctionReturn(PETSC_SUCCESS);
10314: }
10316: /*@
10317: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10319: Collective
10321: Input Parameters:
10322: + mat - the matrix
10323: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10324: . subcomm - MPI communicator split from the communicator in which `mat` resides (or `MPI_COMM_NULL` if `nsubcomm` is used)
10325: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10327: Output Parameter:
10328: . matredundant - redundant matrix
10330: Level: advanced
10332: Notes:
10333: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10334: original matrix has not changed since the last call to `MatCreateRedundantMatrix()`.
10336: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10337: calling it.
10339: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
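Example Usage:
A minimal sketch, assuming `A` is an assembled parallel matrix and `nsubcomm` is the requested number of redundant copies
.vb
   Mat Ared;

   PetscCall(MatCreateRedundantMatrix(A, nsubcomm, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &Ared));
   /* each subcommunicator now holds a complete copy of A */
   PetscCall(MatDestroy(&Ared));
.ve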
10341: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10342: @*/
10343: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10344: {
10345: MPI_Comm comm;
10346: PetscMPIInt size;
10347: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10348: Mat_Redundant *redund = NULL;
10349: PetscSubcomm psubcomm = NULL;
10350: MPI_Comm subcomm_in = subcomm;
10351: Mat *matseq;
10352: IS isrow, iscol;
10353: PetscBool newsubcomm = PETSC_FALSE;
10355: PetscFunctionBegin;
10357: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10358: PetscAssertPointer(*matredundant, 5);
10360: }
10362: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10363: if (size == 1 || nsubcomm == 1) {
10364: if (reuse == MAT_INITIAL_MATRIX) {
10365: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10366: } else {
10367: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10368: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10369: }
10370: PetscFunctionReturn(PETSC_SUCCESS);
10371: }
10373: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10374: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10375: MatCheckPreallocated(mat, 1);
10377: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10378: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10379: /* create psubcomm, then get subcomm */
10380: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10381: PetscCallMPI(MPI_Comm_size(comm, &size));
10382: PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);
10384: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10385: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10386: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10387: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10388: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10389: newsubcomm = PETSC_TRUE;
10390: PetscCall(PetscSubcommDestroy(&psubcomm));
10391: }
10393: /* get isrow, iscol and a local sequential matrix matseq[0] */
10394: if (reuse == MAT_INITIAL_MATRIX) {
10395: mloc_sub = PETSC_DECIDE;
10396: nloc_sub = PETSC_DECIDE;
10397: if (bs < 1) {
10398: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10399: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10400: } else {
10401: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10402: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10403: }
10404: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10405: rstart = rend - mloc_sub;
10406: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10407: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10408: PetscCall(ISSetIdentity(iscol));
10409: } else { /* reuse == MAT_REUSE_MATRIX */
10410: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10411: /* retrieve subcomm */
10412: PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10413: redund = (*matredundant)->redundant;
10414: isrow = redund->isrow;
10415: iscol = redund->iscol;
10416: matseq = redund->matseq;
10417: }
10418: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10420: /* get matredundant over subcomm */
10421: if (reuse == MAT_INITIAL_MATRIX) {
10422: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10424: /* create a supporting struct and attach it to C for reuse */
10425: PetscCall(PetscNew(&redund));
10426: (*matredundant)->redundant = redund;
10427: redund->isrow = isrow;
10428: redund->iscol = iscol;
10429: redund->matseq = matseq;
10430: if (newsubcomm) {
10431: redund->subcomm = subcomm;
10432: } else {
10433: redund->subcomm = MPI_COMM_NULL;
10434: }
10435: } else {
10436: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10437: }
10438: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10439: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10440: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10441: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10442: }
10443: #endif
10444: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10445: PetscFunctionReturn(PETSC_SUCCESS);
10446: }
10448: /*@C
10449: MatGetMultiProcBlock - Creates multiple 'parallel submatrices' from
10450: a given `Mat`. Each submatrix can span multiple MPI processes.
10452: Collective
10454: Input Parameters:
10455: + mat - the matrix
10456: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10457: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10459: Output Parameter:
10460: . subMat - parallel sub-matrices each spanning a given `subcomm`
10462: Level: advanced
10464: Notes:
10465: The submatrix partition across processes is dictated by `subComm`, a
10466: communicator obtained with `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10467: is not restricted to be grouped with consecutive original MPI processes.
10469: Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10470: maps directly to the layout of the original matrix [with respect to the local
10471: row,col partitioning]. So the original 'DiagonalMat' naturally maps
10472: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10473: the `subMat`. However, the offDiagMat loses some columns - and these are
10474: reconstructed with `MatSetValues()`
10476: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10478: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10479: @*/
10480: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10481: {
10482: PetscMPIInt commsize, subCommSize;
10484: PetscFunctionBegin;
10485: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10486: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10487: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize);
10489: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10490: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10491: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10492: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10493: PetscFunctionReturn(PETSC_SUCCESS);
10494: }
10496: /*@
10497: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10499: Not Collective
10501: Input Parameters:
10502: + mat - matrix to extract local submatrix from
10503: . isrow - local row indices for submatrix
10504: - iscol - local column indices for submatrix
10506: Output Parameter:
10507: . submat - the submatrix
10509: Level: intermediate
10511: Notes:
10512: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10514: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10515: the same as that of `mat`, it may be `PETSC_COMM_SELF`, or some other subcommunicator of `mat`'s.
10517: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10518: `MatSetValuesBlockedLocal()` will also be implemented.
10520: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10521: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
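Example Usage:
A minimal sketch, assuming `A` has a local-to-global mapping (for example a matrix from `DMCreateMatrix()`), `isrow` and `iscol` are local index sets, and `lrow`, `lcol`, and `v` are a hypothetical local row index, local column index, and value
.vb
   Mat Asub;

   PetscCall(MatGetLocalSubMatrix(A, isrow, iscol, &Asub));
   PetscCall(MatSetValuesLocal(Asub, 1, &lrow, 1, &lcol, &v, ADD_VALUES)); /* assemble into the submatrix */
   PetscCall(MatRestoreLocalSubMatrix(A, isrow, iscol, &Asub));
   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve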
10523: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10524: @*/
10525: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10526: {
10527: PetscFunctionBegin;
10531: PetscCheckSameComm(isrow, 2, iscol, 3);
10532: PetscAssertPointer(submat, 4);
10533: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10535: if (mat->ops->getlocalsubmatrix) {
10536: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10537: } else {
10538: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10539: }
10540: PetscFunctionReturn(PETSC_SUCCESS);
10541: }
10543: /*@
10544: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10546: Not Collective
10548: Input Parameters:
10549: + mat - matrix to extract local submatrix from
10550: . isrow - local row indices for submatrix
10551: . iscol - local column indices for submatrix
10552: - submat - the submatrix
10554: Level: intermediate
10556: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10557: @*/
10558: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10559: {
10560: PetscFunctionBegin;
10564: PetscCheckSameComm(isrow, 2, iscol, 3);
10565: PetscAssertPointer(submat, 4);
10568: if (mat->ops->restorelocalsubmatrix) {
10569: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10570: } else {
10571: PetscCall(MatDestroy(submat));
10572: }
10573: *submat = NULL;
10574: PetscFunctionReturn(PETSC_SUCCESS);
10575: }
10577: /*@
10578: MatFindZeroDiagonals - Finds all the rows of a matrix that have a zero or missing diagonal entry
10580: Collective
10582: Input Parameter:
10583: . mat - the matrix
10585: Output Parameter:
10586: . is - the list of rows whose diagonal entry is zero or missing (possibly empty)
10588: Level: developer
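Example Usage:
A minimal sketch, assuming `A` is an assembled matrix
.vb
   IS zerodiag;

   PetscCall(MatFindZeroDiagonals(A, &zerodiag));
   PetscCall(ISView(zerodiag, PETSC_VIEWER_STDOUT_WORLD)); /* list the offending rows */
   PetscCall(ISDestroy(&zerodiag));
.ve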
10590: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10591: @*/
10592: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10593: {
10594: PetscFunctionBegin;
10597: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10598: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10600: if (!mat->ops->findzerodiagonals) {
10601: Vec diag;
10602: const PetscScalar *a;
10603: PetscInt *rows;
10604: PetscInt rStart, rEnd, r, nrow = 0;
10606: PetscCall(MatCreateVecs(mat, &diag, NULL));
10607: PetscCall(MatGetDiagonal(mat, diag));
10608: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10609: PetscCall(VecGetArrayRead(diag, &a));
10610: for (r = 0; r < rEnd - rStart; ++r)
10611: if (a[r] == 0.0) ++nrow;
10612: PetscCall(PetscMalloc1(nrow, &rows));
10613: nrow = 0;
10614: for (r = 0; r < rEnd - rStart; ++r)
10615: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10616: PetscCall(VecRestoreArrayRead(diag, &a));
10617: PetscCall(VecDestroy(&diag));
10618: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10619: } else {
10620: PetscUseTypeMethod(mat, findzerodiagonals, is);
10621: }
10622: PetscFunctionReturn(PETSC_SUCCESS);
10623: }
10625: /*@
10626: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10628: Collective
10630: Input Parameter:
10631: . mat - the matrix
10633: Output Parameter:
10634: . is - contains the list of rows with off block diagonal entries
10636: Level: developer
10638: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10639: @*/
10640: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10641: {
10642: PetscFunctionBegin;
10645: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10646: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10648: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10649: PetscFunctionReturn(PETSC_SUCCESS);
10650: }
10652: /*@C
10653: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10655: Collective; No Fortran Support
10657: Input Parameter:
10658: . mat - the matrix
10660: Output Parameter:
10661: . values - the block inverses in column major order (FORTRAN-like)
10663: Level: advanced
10665: Notes:
10666: The size of the blocks is determined by the block size of the matrix.
10668: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10670: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
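Example Usage:
A minimal sketch, assuming `A` is an assembled matrix with block size `bs`
.vb
   const PetscScalar *vals;
   PetscInt           bs;

   PetscCall(MatGetBlockSize(A, &bs));
   PetscCall(MatInvertBlockDiagonal(A, &vals));
   /* vals holds one inverted bs-by-bs block per local block row, stored column-major; the array is owned by A */
.ve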
10672: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10673: @*/
10674: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
10675: {
10676: PetscFunctionBegin;
10678: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10679: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10680: PetscUseTypeMethod(mat, invertblockdiagonal, values);
10681: PetscFunctionReturn(PETSC_SUCCESS);
10682: }
10684: /*@
10685: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10687: Collective; No Fortran Support
10689: Input Parameters:
10690: + mat - the matrix
10691: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10692: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10694: Output Parameter:
10695: . values - the block inverses in column major order (FORTRAN-like)
10697: Level: advanced
10699: Notes:
10700: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10702: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10704: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10705: @*/
10706: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
10707: {
10708: PetscFunctionBegin;
10710: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10711: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10712: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10713: PetscFunctionReturn(PETSC_SUCCESS);
10714: }
10716: /*@
10717: MatInvertBlockDiagonalMat - Sets the values of matrix `C` to be the inverted block diagonal of matrix `A`
10719: Collective
10721: Input Parameters:
10722: + A - the matrix
10723: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10725: Level: advanced
10727: Note:
10728: The block size of the matrix is used to determine the blocks on the diagonal of `C`
10730: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10731: @*/
10732: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10733: {
10734: const PetscScalar *vals;
10735: PetscInt *dnnz;
10736: PetscInt m, rstart, rend, bs, i, j;
10738: PetscFunctionBegin;
10739: PetscCall(MatInvertBlockDiagonal(A, &vals));
10740: PetscCall(MatGetBlockSize(A, &bs));
10741: PetscCall(MatGetLocalSize(A, &m, NULL));
10742: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10743: PetscCall(MatSetBlockSizes(C, A->rmap->bs, A->cmap->bs));
10744: PetscCall(PetscMalloc1(m / bs, &dnnz));
10745: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10746: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10747: PetscCall(PetscFree(dnnz));
10748: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10749: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10750: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10751: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10752: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10753: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10754: PetscFunctionReturn(PETSC_SUCCESS);
10755: }
10757: /*@
10758: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10759: via `MatTransposeColoringCreate()`.
10761: Collective
10763: Input Parameter:
10764: . c - coloring context
10766: Level: intermediate
10768: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10769: @*/
10770: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10771: {
10772: MatTransposeColoring matcolor = *c;
10774: PetscFunctionBegin;
10775: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10776: if (--((PetscObject)matcolor)->refct > 0) {
10777: matcolor = NULL;
10778: PetscFunctionReturn(PETSC_SUCCESS);
10779: }
10781: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10782: PetscCall(PetscFree(matcolor->rows));
10783: PetscCall(PetscFree(matcolor->den2sp));
10784: PetscCall(PetscFree(matcolor->colorforcol));
10785: PetscCall(PetscFree(matcolor->columns));
10786: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10787: PetscCall(PetscHeaderDestroy(c));
10788: PetscFunctionReturn(PETSC_SUCCESS);
10789: }
10791: /*@
10792: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
10793: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
10794: `MatTransposeColoring` to sparse `B`.
10796: Collective
10798: Input Parameters:
10799: + coloring - coloring context created with `MatTransposeColoringCreate()`
10800: - B - sparse matrix
10802: Output Parameter:
10803: . Btdense - dense matrix $B^T$
10805: Level: developer
10807: Note:
10808: These are used internally for some implementations of `MatRARt()`
10810: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10811: @*/
10812: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
10813: {
10814: PetscFunctionBegin;
10819: PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
10820: PetscFunctionReturn(PETSC_SUCCESS);
10821: }
10823: /*@
10824: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
10825: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
10826: in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recovers the sparse matrix
10827: $C_{sp}$ from $C_{den}$.
10829: Collective
10831: Input Parameters:
10832: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10833: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
10835: Output Parameter:
10836: . Csp - sparse matrix
10838: Level: developer
10840: Note:
10841: These are used internally for some implementations of `MatRARt()`
10843: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
10844: @*/
10845: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
10846: {
10847: PetscFunctionBegin;
10852: PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
10853: PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
10854: PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
10855: PetscFunctionReturn(PETSC_SUCCESS);
10856: }
10858: /*@
10859: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
10861: Collective
10863: Input Parameters:
10864: + mat - the matrix product C
10865: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
10867: Output Parameter:
10868: . color - the new coloring context
10870: Level: intermediate
10872: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
10873: `MatTransColoringApplyDenToSp()`
10874: @*/
10875: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
10876: {
10877: MatTransposeColoring c;
10878: MPI_Comm comm;
10880: PetscFunctionBegin;
10881: PetscAssertPointer(color, 3);
10883: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10884: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10885: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
10886: c->ctype = iscoloring->ctype;
10887: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
10888: *color = c;
10889: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10890: PetscFunctionReturn(PETSC_SUCCESS);
10891: }
10893: /*@
10894: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
10895: matrix has had new nonzero locations added to (or removed from) the matrix since the previous call, the value will be larger.
10897: Not Collective
10899: Input Parameter:
10900: . mat - the matrix
10902: Output Parameter:
10903: . state - the current state
10905: Level: intermediate
10907: Notes:
10908: You can only compare states from two different calls to the SAME matrix; you cannot compare calls between
10909: different matrices
10911: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
10913: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
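Example Usage:
A minimal sketch, assuming `A` is a matrix whose nonzero structure may change between two points in the code
.vb
   PetscObjectState nzstate0, nzstate1;

   PetscCall(MatGetNonzeroState(A, &nzstate0));
   /* ... assembly or other operations on A ... */
   PetscCall(MatGetNonzeroState(A, &nzstate1));
   if (nzstate1 > nzstate0) {
     /* nonzero locations were added or removed; for example, redo a symbolic factorization */
   }
.ve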
10915: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
10916: @*/
10917: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
10918: {
10919: PetscFunctionBegin;
10921: *state = mat->nonzerostate;
10922: PetscFunctionReturn(PETSC_SUCCESS);
10923: }
10925: /*@
10926: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
10927: matrices from each processor
10929: Collective
10931: Input Parameters:
10932: + comm - the communicator the parallel matrix will live on
10933: . seqmat - the input sequential matrix (one per MPI process)
10934: . n - number of local columns (or `PETSC_DECIDE`)
10935: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10937: Output Parameter:
10938: . mpimat - the parallel matrix generated
10940: Level: developer
10942: Note:
10943: The number of columns of the matrix in EACH processor MUST be the same.
10945: .seealso: [](ch_matrices), `Mat`
10946: @*/
10947: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
10948: {
10949: PetscMPIInt size;
10951: PetscFunctionBegin;
10952: PetscCallMPI(MPI_Comm_size(comm, &size));
10953: if (size == 1) {
10954: if (reuse == MAT_INITIAL_MATRIX) {
10955: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
10956: } else {
10957: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
10958: }
10959: PetscFunctionReturn(PETSC_SUCCESS);
10960: }
10962: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10964: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
10965: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
10966: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
10967: PetscFunctionReturn(PETSC_SUCCESS);
10968: }
10970: /*@
10971: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
10973: Collective
10975: Input Parameters:
10976: + A - the matrix to create subdomains from
10977: - N - requested number of subdomains
10979: Output Parameters:
10980: + n - number of subdomains resulting on this MPI process
10981: - iss - `IS` list with indices of subdomains on this MPI process
10983: Level: advanced
10985: Note:
10986: The number of subdomains must be smaller than the communicator size
10988: .seealso: [](ch_matrices), `Mat`, `IS`
10989: @*/
10990: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
10991: {
10992: MPI_Comm comm, subcomm;
10993: PetscMPIInt size, rank, color;
10994: PetscInt rstart, rend, k;
10996: PetscFunctionBegin;
10997: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
10998: PetscCallMPI(MPI_Comm_size(comm, &size));
10999: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11000: PetscCheck(N >= 1 && N < size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11001: *n = 1;
11002: k = size / N + (size % N > 0); /* There are up to k ranks to a color */
11003: color = rank / k;
11004: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11005: PetscCall(PetscMalloc1(1, iss));
11006: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11007: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11008: PetscCallMPI(MPI_Comm_free(&subcomm));
11009: PetscFunctionReturn(PETSC_SUCCESS);
11010: }
11012: /*@
11013: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11015: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11016: If they are not the same, uses `MatMatMatMult()`.
11018: Once the coarse grid problem is constructed, corrects for interpolation operators
11019: that are not of full rank, which can legitimately happen in the case of non-nested
11020: geometric multigrid.
11022: Input Parameters:
11023: + restrct - restriction operator
11024: . dA - fine grid matrix
11025: . interpolate - interpolation operator
11026: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11027: - fill - expected fill, use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
11029: Output Parameter:
11030: . A - the Galerkin coarse matrix
11032: Options Database Key:
11033: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
11035: Level: developer
11037: Note:
11038: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
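Example Usage:
A minimal sketch, assuming `R` (restriction), `Af` (fine grid matrix), and `P` (interpolation) are assembled matrices of compatible sizes
.vb
   Mat Ac;

   PetscCall(MatGalerkin(R, Af, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Ac)); /* Ac = R * Af * P, with zero rows corrected */
   PetscCall(MatDestroy(&Ac));
.ve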
11040: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11041: @*/
11042: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11043: {
11044: IS zerorows;
11045: Vec diag;
11047: PetscFunctionBegin;
11048: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)restrct), PETSC_ERR_SUP, "Inplace product not supported");
11049: /* Construct the coarse grid matrix */
11050: if (interpolate == restrct) {
11051: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11052: } else {
11053: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11054: }
11056: /* If the interpolation matrix is not of full rank, A will have zero rows.
11057: This can legitimately happen in the case of non-nested geometric multigrid.
11058: In that event, we set the rows of the matrix to the rows of the identity,
11059: ignoring the equations (as the RHS will also be zero). */
11061: PetscCall(MatFindZeroRows(*A, &zerorows));
11063: if (zerorows != NULL) { /* if there are any zero rows */
11064: PetscCall(MatCreateVecs(*A, &diag, NULL));
11065: PetscCall(MatGetDiagonal(*A, diag));
11066: PetscCall(VecISSet(diag, zerorows, 1.0));
11067: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11068: PetscCall(VecDestroy(&diag));
11069: PetscCall(ISDestroy(&zerorows));
11070: }
11071: PetscFunctionReturn(PETSC_SUCCESS);
11072: }
11074: /*@C
11075: MatSetOperation - Allows the user to set a matrix operation for any matrix type
11077: Logically Collective
11079: Input Parameters:
11080: + mat - the matrix
11081: . op - the name of the operation
11082: - f - the function that provides the operation
11084: Level: developer
11086: Example Usage:
11087: .vb
11088: extern PetscErrorCode usermult(Mat, Vec, Vec);
11090: PetscCall(MatCreateXXX(comm, ..., &A));
11091: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFn *)usermult));
11092: .ve
11094: Notes:
11095: See the file `include/petscmat.h` for a complete list of matrix
11096: operations, which all have the form MATOP_<OPERATION>, where
11097: <OPERATION> is the name (in all capital letters) of the
11098: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11100: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11101: sequence as the usual matrix interface routines, since they
11102: are intended to be accessed via the usual matrix interface
11103: routines, e.g.,
11104: .vb
11105: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11106: .ve
11108: In particular each function MUST return `PETSC_SUCCESS` on success and
11109: nonzero on failure.
11111: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11113: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11114: @*/
11115: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, void (*f)(void))
11116: {
11117: PetscFunctionBegin;
11119: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))mat->ops->view) mat->ops->viewnative = mat->ops->view;
11120: (((void (**)(void))mat->ops)[op]) = f;
11121: PetscFunctionReturn(PETSC_SUCCESS);
11122: }
11124: /*@C
11125: MatGetOperation - Gets a matrix operation for any matrix type.
11127: Not Collective
11129: Input Parameters:
11130: + mat - the matrix
11131: - op - the name of the operation
11133: Output Parameter:
11134: . f - the function that provides the operation
11136: Level: developer
11138: Example Usage:
11139: .vb
11140: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11142: MatGetOperation(A, MATOP_MULT, (void (**)(void))&usermult);
11143: .ve
11145: Notes:
11146: See the file `include/petscmat.h` for a complete list of matrix
11147: operations, which all have the form MATOP_<OPERATION>, where
11148: <OPERATION> is the name (in all capital letters) of the
11149: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11151: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11153: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11154: @*/
11155: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, void (**f)(void))
11156: {
11157: PetscFunctionBegin;
11159: *f = (((void (**)(void))mat->ops)[op]);
11160: PetscFunctionReturn(PETSC_SUCCESS);
11161: }
11163: /*@
11164: MatHasOperation - Determines whether the given matrix supports the particular operation.
11166: Not Collective
11168: Input Parameters:
11169: + mat - the matrix
11170: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11172: Output Parameter:
11173: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11175: Level: advanced
11177: Note:
11178: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
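Example Usage:
A minimal sketch, assuming `A` is an assembled matrix
.vb
   Mat       At;
   PetscBool has;

   PetscCall(MatHasOperation(A, MATOP_TRANSPOSE, &has));
   if (has) PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &At)); /* only transpose when the type supports it */
.ve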
11180: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11181: @*/
11182: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11183: {
11184: PetscFunctionBegin;
11186: PetscAssertPointer(has, 3);
11187: if (mat->ops->hasoperation) {
11188: PetscUseTypeMethod(mat, hasoperation, op, has);
11189: } else {
11190: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11191: else {
11192: *has = PETSC_FALSE;
11193: if (op == MATOP_CREATE_SUBMATRIX) {
11194: PetscMPIInt size;
11196: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11197: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11198: }
11199: }
11200: }
11201: PetscFunctionReturn(PETSC_SUCCESS);
11202: }
11204: /*@
11205: MatHasCongruentLayouts - Determines whether the row and column layouts of the matrix are congruent
11207: Collective
11209: Input Parameter:
11210: . mat - the matrix
11212: Output Parameter:
11213: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11215: Level: beginner
11217: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11218: @*/
11219: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11220: {
11221: PetscFunctionBegin;
11224: PetscAssertPointer(cong, 2);
11225: if (!mat->rmap || !mat->cmap) {
11226: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11227: PetscFunctionReturn(PETSC_SUCCESS);
11228: }
11229: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11230: PetscCall(PetscLayoutSetUp(mat->rmap));
11231: PetscCall(PetscLayoutSetUp(mat->cmap));
11232: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11233: if (*cong) mat->congruentlayouts = 1;
11234: else mat->congruentlayouts = 0;
11235: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11236: PetscFunctionReturn(PETSC_SUCCESS);
11237: }
11239: PetscErrorCode MatSetInf(Mat A)
11240: {
11241: PetscFunctionBegin;
11242: PetscUseTypeMethod(A, setinf);
11243: PetscFunctionReturn(PETSC_SUCCESS);
11244: }
11246: /*@
11247: MatCreateGraph - Creates a scalar matrix (that is, a matrix with one vertex for each block vertex in the original matrix) for use in graph algorithms,
11248: possibly removing small values from the graph structure.
11250: Collective
11252: Input Parameters:
11253: + A - the matrix
11254: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11255: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11256: . filter - filter value - < 0: does nothing; == 0: removes only 0.0 entries; otherwise: removes entries with abs(entries) <= value
11257: . num_idx - size of 'index' array
11258: - index - array of block indices to use for graph strength of connection weight
11260: Output Parameter:
11261: . graph - the resulting graph
11263: Level: advanced
11265: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11266: @*/
11267: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
11268: {
11269: PetscFunctionBegin;
11273: PetscAssertPointer(graph, 7);
11274: PetscCall(PetscLogEventBegin(MAT_CreateGraph, A, 0, 0, 0));
11275: PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
11276: PetscCall(PetscLogEventEnd(MAT_CreateGraph, A, 0, 0, 0));
11277: PetscFunctionReturn(PETSC_SUCCESS);
11278: }
11280: /*@
11281: MatEliminateZeros - Eliminates the nondiagonal zero entries from the nonzero structure of a sparse `Mat` in place,
11282: meaning the same memory is used for the matrix, and no new memory is allocated.
11284: Collective
11286: Input Parameters:
11287: + A - the matrix
11288: - keep - if the diagonal coefficient of a given row of `A` is zero, indicates whether it should be left in the structure or eliminated as well
11290: Level: intermediate
11292: Developer Note:
11293: The entries in the sparse matrix data structure are shifted to fill in the unneeded locations in the data. Thus the end
11294: of the arrays in the data structure are unneeded.
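Example Usage:
A minimal sketch, assuming `A` is an assembled sparse matrix containing explicitly stored zeros
.vb
   PetscCall(MatEliminateZeros(A, PETSC_TRUE)); /* PETSC_TRUE keeps zero diagonal entries in the nonzero structure */
.ve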
11296: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11297: @*/
11298: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11299: {
11300: PetscFunctionBegin;
11302: PetscUseTypeMethod(A, eliminatezeros, keep);
11303: PetscFunctionReturn(PETSC_SUCCESS);
11304: }