Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_CreateGraph;
40: PetscLogEvent MAT_SetValuesBatch;
41: PetscLogEvent MAT_ViennaCLCopyToGPU;
42: PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
43: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
44: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
45: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
46: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
47: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
49: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
51: /*@
52: MatSetRandom - Sets all components of a matrix to random numbers.
54: Logically Collective
56: Input Parameters:
57: + x - the matrix
58: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL`, in which case
59: one will be created internally.
61: Example:
62: .vb
63: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
64: MatSetRandom(x,rctx);
65: PetscRandomDestroy(rctx);
66: .ve
68: Level: intermediate
70: Notes:
71: For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations.
73: For sparse matrices that already have nonzero locations, it fills those locations with random numbers.
75: It generates an error if used on unassembled sparse matrices that have not been preallocated.
77: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
78: @*/
79: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
80: {
81: PetscRandom randObj = NULL;
83: PetscFunctionBegin;
87: MatCheckPreallocated(x, 1);
89: if (!rctx) {
90: MPI_Comm comm;
91: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
92: PetscCall(PetscRandomCreate(comm, &randObj));
93: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
94: PetscCall(PetscRandomSetFromOptions(randObj));
95: rctx = randObj;
96: }
97: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
98: PetscUseTypeMethod(x, setrandom, rctx);
99: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
101: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
103: PetscCall(PetscRandomDestroy(&randObj));
104: PetscFunctionReturn(PETSC_SUCCESS);
105: }
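/*
  Usage sketch (illustrative only, not part of matrix.c): fix a nonzero pattern, assemble, and then
  fill the existing nonzero locations with random numbers. The function name and matrix size are
  hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatSetRandomUsageSketch(MPI_Comm comm)
{
  Mat         A;
  PetscRandom rctx;
  PetscInt    i, rstart, rend;

  PetscFunctionBegin;
  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 10, 10));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValue(A, i, i, 1.0, INSERT_VALUES)); /* establish the nonzero pattern */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscRandomCreate(comm, &rctx));
  PetscCall(PetscRandomSetFromOptions(rctx));
  PetscCall(MatSetRandom(A, rctx)); /* overwrites the existing nonzeros with random values */
  PetscCall(PetscRandomDestroy(&rctx));
  PetscCall(MatDestroy(&A));
  PetscFunctionReturn(PETSC_SUCCESS);
}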
107: /*@
108: MatCopyHashToXAIJ - copy hash table entries into an XAIJ matrix type
110: Logically Collective
112: Input Parameter:
113: . A - A matrix in unassembled, hash table form
115: Output Parameter:
116: . B - The XAIJ matrix. This can either be `A` or some matrix of equivalent size, e.g. obtained from `A` via `MatDuplicate()`
118: Example:
119: .vb
120: PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
121: PetscCall(MatCopyHashToXAIJ(A, B));
122: .ve
124: Level: advanced
126: Notes:
127: If `B` is `A`, then the hash table data structure will be destroyed. On return, `B` is assembled
129: .seealso: [](ch_matrices), `Mat`, `MAT_USE_HASH_TABLE`
130: @*/
131: PetscErrorCode MatCopyHashToXAIJ(Mat A, Mat B)
132: {
133: PetscFunctionBegin;
135: PetscUseTypeMethod(A, copyhashtoxaij, B);
136: PetscFunctionReturn(PETSC_SUCCESS);
137: }
139: /*@
140: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
142: Logically Collective
144: Input Parameter:
145: . mat - the factored matrix
147: Output Parameters:
148: + pivot - the pivot value computed
149: - row - the row in which the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
150: share the matrix
152: Level: advanced
154: Notes:
155: This routine does not work for factorizations done with external packages.
157: This routine should only be called if `MatFactorGetError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
159: This can also be called on non-factored matrices, for example, matrices used in SOR.
161: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
162: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
163: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
164: @*/
165: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
166: {
167: PetscFunctionBegin;
169: PetscAssertPointer(pivot, 2);
170: PetscAssertPointer(row, 3);
171: *pivot = mat->factorerror_zeropivot_value;
172: *row = mat->factorerror_zeropivot_row;
173: PetscFunctionReturn(PETSC_SUCCESS);
174: }
176: /*@
177: MatFactorGetError - gets the error code from a factorization
179: Logically Collective
181: Input Parameter:
182: . mat - the factored matrix
184: Output Parameter:
185: . err - the error code
187: Level: advanced
189: Note:
190: This can also be called on non-factored matrices, for example, matrices used in SOR.
192: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
193: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
194: @*/
195: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
196: {
197: PetscFunctionBegin;
199: PetscAssertPointer(err, 2);
200: *err = mat->factorerrortype;
201: PetscFunctionReturn(PETSC_SUCCESS);
202: }
204: /*@
205: MatFactorClearError - clears the error code in a factorization
207: Logically Collective
209: Input Parameter:
210: . mat - the factored matrix
212: Level: developer
214: Note:
215: This can also be called on non-factored matrices, for example, matrices used in SOR.
217: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
218: `MatGetErrorCode()`, `MatFactorError`
219: @*/
220: PetscErrorCode MatFactorClearError(Mat mat)
221: {
222: PetscFunctionBegin;
224: mat->factorerrortype = MAT_FACTOR_NOERROR;
225: mat->factorerror_zeropivot_value = 0.0;
226: mat->factorerror_zeropivot_row = 0;
227: PetscFunctionReturn(PETSC_SUCCESS);
228: }
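/*
  Usage sketch (illustrative only, not part of matrix.c): perform an LU factorization with the
  built-in PETSc solver and inspect the factorization error state afterwards. Assumes A is an
  assembled matrix type supported by MATSOLVERPETSC (e.g. MATSEQAIJ); the function name is hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatFactorErrorUsageSketch(Mat A)
{
  Mat            F;
  IS             rowperm, colperm;
  MatFactorInfo  info;
  MatFactorError err;

  PetscFunctionBegin;
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm));
  PetscCall(MatFactorInfoInitialize(&info));
  PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  PetscCall(MatFactorGetError(F, &err));
  if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
    PetscReal pivot;
    PetscInt  row;

    PetscCall(MatFactorGetErrorZeroPivot(F, &pivot, &row));
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "zero pivot %g in row %" PetscInt_FMT "\n", (double)pivot, row));
    PetscCall(MatFactorClearError(F));
  }
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));
  PetscCall(MatDestroy(&F));
  PetscFunctionReturn(PETSC_SUCCESS);
}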
230: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
231: {
232: Vec r, l;
233: const PetscScalar *al;
234: PetscInt i, nz, gnz, N, n, st;
236: PetscFunctionBegin;
237: PetscCall(MatCreateVecs(mat, &r, &l));
238: if (!cols) { /* nonzero rows */
239: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
240: PetscCall(MatGetSize(mat, &N, NULL));
241: PetscCall(MatGetLocalSize(mat, &n, NULL));
242: PetscCall(VecSet(l, 0.0));
243: PetscCall(VecSetRandom(r, NULL));
244: PetscCall(MatMult(mat, r, l));
245: PetscCall(VecGetArrayRead(l, &al));
246: } else { /* nonzero columns */
247: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
248: PetscCall(MatGetSize(mat, NULL, &N));
249: PetscCall(MatGetLocalSize(mat, NULL, &n));
250: PetscCall(VecSet(r, 0.0));
251: PetscCall(VecSetRandom(l, NULL));
252: PetscCall(MatMultTranspose(mat, l, r));
253: PetscCall(VecGetArrayRead(r, &al));
254: }
255: if (tol <= 0.0) {
256: for (i = 0, nz = 0; i < n; i++)
257: if (al[i] != 0.0) nz++;
258: } else {
259: for (i = 0, nz = 0; i < n; i++)
260: if (PetscAbsScalar(al[i]) > tol) nz++;
261: }
262: PetscCallMPI(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
263: if (gnz != N) {
264: PetscInt *nzr;
265: PetscCall(PetscMalloc1(nz, &nzr));
266: if (nz) {
267: if (tol < 0) {
268: for (i = 0, nz = 0; i < n; i++)
269: if (al[i] != 0.0) nzr[nz++] = i + st;
270: } else {
271: for (i = 0, nz = 0; i < n; i++)
272: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
273: }
274: }
275: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
276: } else *nonzero = NULL;
277: if (!cols) { /* nonzero rows */
278: PetscCall(VecRestoreArrayRead(l, &al));
279: } else {
280: PetscCall(VecRestoreArrayRead(r, &al));
281: }
282: PetscCall(VecDestroy(&l));
283: PetscCall(VecDestroy(&r));
284: PetscFunctionReturn(PETSC_SUCCESS);
285: }
287: /*@
288: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
290: Input Parameter:
291: . mat - the matrix
293: Output Parameter:
294: . keptrows - the rows that are not completely zero
296: Level: intermediate
298: Note:
299: `keptrows` is set to `NULL` if all rows are nonzero.
301: Developer Note:
302: If `keptrows` is not `NULL`, it must be sorted.
304: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
305: @*/
306: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
307: {
308: PetscFunctionBegin;
311: PetscAssertPointer(keptrows, 2);
312: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
313: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
314: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
315: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
316: if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
317: PetscFunctionReturn(PETSC_SUCCESS);
318: }
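/*
  Usage sketch (illustrative only, not part of matrix.c): print the index set of rows that
  contain at least one nonzero; a NULL result means every row has a nonzero. The function
  name is hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatFindNonzeroRowsUsageSketch(Mat A)
{
  IS keptrows;

  PetscFunctionBegin;
  PetscCall(MatFindNonzeroRows(A, &keptrows));
  if (keptrows) {
    PetscCall(ISView(keptrows, PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)A))));
    PetscCall(ISDestroy(&keptrows));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}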
320: /*@
321: MatFindZeroRows - Locate all rows that are completely zero in the matrix
323: Input Parameter:
324: . mat - the matrix
326: Output Parameter:
327: . zerorows - the rows that are completely zero
329: Level: intermediate
331: Note:
332: `zerorows` is set to `NULL` if no rows are zero.
334: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
335: @*/
336: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
337: {
338: IS keptrows;
339: PetscInt m, n;
341: PetscFunctionBegin;
344: PetscAssertPointer(zerorows, 2);
345: PetscCall(MatFindNonzeroRows(mat, &keptrows));
346: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
347: In keeping with this convention, we set zerorows to NULL if there are no zero
348: rows. */
349: if (keptrows == NULL) {
350: *zerorows = NULL;
351: } else {
352: PetscCall(MatGetOwnershipRange(mat, &m, &n));
353: PetscCall(ISComplement(keptrows, m, n, zerorows));
354: PetscCall(ISDestroy(&keptrows));
355: }
356: PetscFunctionReturn(PETSC_SUCCESS);
357: }
359: /*@
360: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
362: Not Collective
364: Input Parameter:
365: . A - the matrix
367: Output Parameter:
368: . a - the diagonal part (which is a SEQUENTIAL matrix)
370: Level: advanced
372: Notes:
373: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
375: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
377: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
378: @*/
379: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
380: {
381: PetscFunctionBegin;
384: PetscAssertPointer(a, 2);
385: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
386: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
387: else {
388: PetscMPIInt size;
390: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
391: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
392: *a = A;
393: }
394: PetscFunctionReturn(PETSC_SUCCESS);
395: }
397: /*@
398: MatGetTrace - Gets the trace of a matrix, i.e., the sum of the diagonal entries.
400: Collective
402: Input Parameter:
403: . mat - the matrix
405: Output Parameter:
406: . trace - the sum of the diagonal entries
408: Level: advanced
410: .seealso: [](ch_matrices), `Mat`
411: @*/
412: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
413: {
414: Vec diag;
416: PetscFunctionBegin;
418: PetscAssertPointer(trace, 2);
419: PetscCall(MatCreateVecs(mat, &diag, NULL));
420: PetscCall(MatGetDiagonal(mat, diag));
421: PetscCall(VecSum(diag, trace));
422: PetscCall(VecDestroy(&diag));
423: PetscFunctionReturn(PETSC_SUCCESS);
424: }
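/*
  Usage sketch (illustrative only, not part of matrix.c): compute and print the trace of an
  assembled matrix. The function name is hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatGetTraceUsageSketch(Mat A)
{
  PetscScalar trace;

  PetscFunctionBegin;
  PetscCall(MatGetTrace(A, &trace));
  PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "trace = %g\n", (double)PetscRealPart(trace)));
  PetscFunctionReturn(PETSC_SUCCESS);
}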
426: /*@
427: MatRealPart - Zeros out the imaginary part of the matrix
429: Logically Collective
431: Input Parameter:
432: . mat - the matrix
434: Level: advanced
436: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
437: @*/
438: PetscErrorCode MatRealPart(Mat mat)
439: {
440: PetscFunctionBegin;
443: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
444: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
445: MatCheckPreallocated(mat, 1);
446: PetscUseTypeMethod(mat, realpart);
447: PetscFunctionReturn(PETSC_SUCCESS);
448: }
450: /*@C
451: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
453: Collective
455: Input Parameter:
456: . mat - the matrix
458: Output Parameters:
459: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
460: - ghosts - the global indices of the ghost points
462: Level: advanced
464: Note:
465: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
467: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
468: @*/
469: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
470: {
471: PetscFunctionBegin;
474: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
475: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
476: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
477: else {
478: if (nghosts) *nghosts = 0;
479: if (ghosts) *ghosts = NULL;
480: }
481: PetscFunctionReturn(PETSC_SUCCESS);
482: }
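/*
  Usage sketch (illustrative only, not part of matrix.c): use the ghost information of a matrix to
  create a compatible ghosted vector, as suggested in the note above. The local size is taken from
  the matrix column layout; the function name is hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatGetGhostsUsageSketch(Mat A, Vec *x)
{
  PetscInt        nghosts, nlocal;
  const PetscInt *ghosts;

  PetscFunctionBegin;
  PetscCall(MatGetGhosts(A, &nghosts, &ghosts));
  PetscCall(MatGetLocalSize(A, NULL, &nlocal));
  PetscCall(VecCreateGhost(PetscObjectComm((PetscObject)A), nlocal, PETSC_DECIDE, nghosts, ghosts, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}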
484: /*@
485: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
487: Logically Collective
489: Input Parameter:
490: . mat - the matrix
492: Level: advanced
494: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
495: @*/
496: PetscErrorCode MatImaginaryPart(Mat mat)
497: {
498: PetscFunctionBegin;
501: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
502: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
503: MatCheckPreallocated(mat, 1);
504: PetscUseTypeMethod(mat, imaginarypart);
505: PetscFunctionReturn(PETSC_SUCCESS);
506: }
508: /*@
509: MatMissingDiagonal - Determine if a sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure
511: Not Collective
513: Input Parameter:
514: . mat - the matrix
516: Output Parameters:
517: + missing - is any diagonal entry missing
518: - dd - first diagonal entry that is missing on this process (optional)
520: Level: advanced
522: Note:
523: This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value
525: .seealso: [](ch_matrices), `Mat`
526: @*/
527: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
528: {
529: PetscFunctionBegin;
532: PetscAssertPointer(missing, 2);
533: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
534: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
535: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
536: PetscFunctionReturn(PETSC_SUCCESS);
537: }
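/*
  Usage sketch (illustrative only, not part of matrix.c): report the first diagonal entry missing
  from the nonzero structure on this process. The function name is hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatMissingDiagonalUsageSketch(Mat A)
{
  PetscBool missing;
  PetscInt  dd;

  PetscFunctionBegin;
  PetscCall(MatMissingDiagonal(A, &missing, &dd));
  if (missing) PetscCall(PetscPrintf(PETSC_COMM_SELF, "first missing diagonal entry at row %" PetscInt_FMT "\n", dd));
  PetscFunctionReturn(PETSC_SUCCESS);
}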
539: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
540: /*@C
541: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
542: for each row that you get to ensure that your application does
543: not bleed memory.
545: Not Collective
547: Input Parameters:
548: + mat - the matrix
549: - row - the row to get
551: Output Parameters:
552: + ncols - if not `NULL`, the number of nonzeros in `row`
553: . cols - if not `NULL`, the column numbers
554: - vals - if not `NULL`, the numerical values
556: Level: advanced
558: Notes:
559: This routine is provided for people who need to have direct access
560: to the structure of a matrix. We hope that we provide enough
561: high-level matrix routines that few users will need it.
563: `MatGetRow()` always returns 0-based column indices, regardless of
564: whether the internal representation is 0-based (default) or 1-based.
566: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
567: not wish to extract these quantities.
569: The user can only examine the values extracted with `MatGetRow()`;
570: the values CANNOT be altered. To change the matrix entries, one
571: must use `MatSetValues()`.
573: You can only have one call to `MatGetRow()` outstanding for a particular
574: matrix at a time, per processor. `MatGetRow()` can only obtain rows
575: associated with the given processor, it cannot get rows from the
576: other processors; for that we suggest using `MatCreateSubMatrices()`, then
577: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
578: is in the global numbering of rows.
580: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
582: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
584: Fortran Note:
585: The calling sequence is
586: .vb
587: MatGetRow(matrix,row,ncols,cols,values,ierr)
588: Mat matrix (input)
589: PetscInt row (input)
590: PetscInt ncols (output)
591: PetscInt cols(maxcols) (output)
592: PetscScalar values(maxcols) (output)
593: .ve
594: where maxcols >= maximum nonzeros in any row of the matrix.
596: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
597: @*/
598: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
599: {
600: PetscInt incols;
602: PetscFunctionBegin;
605: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
606: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
607: MatCheckPreallocated(mat, 1);
608: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
609: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
610: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
611: if (ncols) *ncols = incols;
612: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
613: PetscFunctionReturn(PETSC_SUCCESS);
614: }
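/*
  Usage sketch (illustrative only, not part of matrix.c): sum the entries of each locally owned
  row, pairing every MatGetRow() with a MatRestoreRow() as the manual page above requires. The
  function name is hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatGetRowUsageSketch(Mat A)
{
  PetscInt rstart, rend, row;

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  for (row = rstart; row < rend; row++) {
    PetscInt           ncols, j;
    const PetscInt    *cols;
    const PetscScalar *vals;
    PetscScalar        rowsum = 0.0;

    PetscCall(MatGetRow(A, row, &ncols, &cols, &vals));
    for (j = 0; j < ncols; j++) rowsum += vals[j];
    PetscCall(MatRestoreRow(A, row, &ncols, &cols, &vals));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "row %" PetscInt_FMT " sum %g\n", row, (double)PetscRealPart(rowsum)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}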
616: /*@
617: MatConjugate - replaces the matrix values with their complex conjugates
619: Logically Collective
621: Input Parameter:
622: . mat - the matrix
624: Level: advanced
626: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
627: @*/
628: PetscErrorCode MatConjugate(Mat mat)
629: {
630: PetscFunctionBegin;
632: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
633: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
634: PetscUseTypeMethod(mat, conjugate);
635: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
636: }
637: PetscFunctionReturn(PETSC_SUCCESS);
638: }
640: /*@C
641: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
643: Not Collective
645: Input Parameters:
646: + mat - the matrix
647: . row - the row to get
648: . ncols - the number of nonzeros
649: . cols - the columns of the nonzeros
650: - vals - if not `NULL`, the values of the nonzeros
652: Level: advanced
654: Notes:
655: This routine should be called after you have finished examining the entries.
657: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
658: use of the arrays after they have been restored. If you pass `NULL`, it will
659: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
661: Fortran Note:
662: `MatRestoreRow()` MUST be called after `MatGetRow()`
663: before another call to `MatGetRow()` can be made.
665: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
666: @*/
667: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
668: {
669: PetscFunctionBegin;
671: if (ncols) PetscAssertPointer(ncols, 3);
672: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
673: if (!mat->ops->restorerow) PetscFunctionReturn(PETSC_SUCCESS);
674: PetscUseTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
675: if (ncols) *ncols = 0;
676: if (cols) *cols = NULL;
677: if (vals) *vals = NULL;
678: PetscFunctionReturn(PETSC_SUCCESS);
679: }
681: /*@
682: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
683: You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.
685: Not Collective
687: Input Parameter:
688: . mat - the matrix
690: Level: advanced
692: Note:
693: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.
695: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
696: @*/
697: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
698: {
699: PetscFunctionBegin;
702: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
703: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
704: MatCheckPreallocated(mat, 1);
705: if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
706: PetscUseTypeMethod(mat, getrowuppertriangular);
707: PetscFunctionReturn(PETSC_SUCCESS);
708: }
710: /*@
711: MatRestoreRowUpperTriangular - Disables calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
713: Not Collective
715: Input Parameter:
716: . mat - the matrix
718: Level: advanced
720: Note:
721: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
723: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
724: @*/
725: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
726: {
727: PetscFunctionBegin;
730: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
731: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
732: MatCheckPreallocated(mat, 1);
733: if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
734: PetscUseTypeMethod(mat, restorerowuppertriangular);
735: PetscFunctionReturn(PETSC_SUCCESS);
736: }
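/*
  Usage sketch (illustrative only, not part of matrix.c): bracket MatGetRow()/MatRestoreRow() on a
  MATSBAIJ matrix with the upper-triangular access flag, as described above. The function name is
  hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatSBAIJRowAccessUsageSketch(Mat A, PetscInt row)
{
  PetscInt           ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;

  PetscFunctionBegin;
  PetscCall(MatGetRowUpperTriangular(A));
  PetscCall(MatGetRow(A, row, &ncols, &cols, &vals)); /* returns only the upper triangular part of the row */
  PetscCall(MatRestoreRow(A, row, &ncols, &cols, &vals));
  PetscCall(MatRestoreRowUpperTriangular(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}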
738: /*@
739: MatSetOptionsPrefix - Sets the prefix used for searching for all
740: `Mat` options in the database.
742: Logically Collective
744: Input Parameters:
745: + A - the matrix
746: - prefix - the prefix to prepend to all option names
748: Level: advanced
750: Notes:
751: A hyphen (-) must NOT be given at the beginning of the prefix name.
752: The first character of all runtime options is AUTOMATICALLY the hyphen.
754: This is NOT used for options for the factorization of the matrix. Normally the
755: prefix is automatically passed in from the PC calling the factorization. To set
756: it directly use `MatSetOptionsPrefixFactor()`
758: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
759: @*/
760: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
761: {
762: PetscFunctionBegin;
764: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
765: PetscFunctionReturn(PETSC_SUCCESS);
766: }
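/*
  Usage sketch (illustrative only, not part of matrix.c): give a matrix its own options prefix so
  that, for example, -my_mat_view applies only to it. The prefix and function name are hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatSetOptionsPrefixUsageSketch(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatSetOptionsPrefix(A, "my_"));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatViewFromOptions(A, NULL, "-mat_view")); /* honored as -my_mat_view in the options database */
  PetscFunctionReturn(PETSC_SUCCESS);
}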
768: /*@
769: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database
770: for matrices created with `MatGetFactor()`
772: Logically Collective
774: Input Parameters:
775: + A - the matrix
776: - prefix - the prefix to prepend to all option names for the factored matrix
778: Level: developer
780: Notes:
781: A hyphen (-) must NOT be given at the beginning of the prefix name.
782: The first character of all runtime options is AUTOMATICALLY the hyphen.
784: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
785: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
787: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
788: @*/
789: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
790: {
791: PetscFunctionBegin;
793: if (prefix) {
794: PetscAssertPointer(prefix, 2);
795: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
796: if (prefix != A->factorprefix) {
797: PetscCall(PetscFree(A->factorprefix));
798: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
799: }
800: } else PetscCall(PetscFree(A->factorprefix));
801: PetscFunctionReturn(PETSC_SUCCESS);
802: }
804: /*@
805: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database
806: for matrices created with `MatGetFactor()`
808: Logically Collective
810: Input Parameters:
811: + A - the matrix
812: - prefix - the prefix to prepend to all option names for the factored matrix
814: Level: developer
816: Notes:
817: A hyphen (-) must NOT be given at the beginning of the prefix name.
818: The first character of all runtime options is AUTOMATICALLY the hyphen.
820: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
821: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
823: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
824: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
825: `MatSetOptionsPrefix()`
826: @*/
827: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
828: {
829: size_t len1, len2, new_len;
831: PetscFunctionBegin;
833: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
834: if (!A->factorprefix) {
835: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
836: PetscFunctionReturn(PETSC_SUCCESS);
837: }
838: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
840: PetscCall(PetscStrlen(A->factorprefix, &len1));
841: PetscCall(PetscStrlen(prefix, &len2));
842: new_len = len1 + len2 + 1;
843: PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
844: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
845: PetscFunctionReturn(PETSC_SUCCESS);
846: }
848: /*@
849: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
850: matrix options in the database.
852: Logically Collective
854: Input Parameters:
855: + A - the matrix
856: - prefix - the prefix to prepend to all option names
858: Level: advanced
860: Note:
861: A hyphen (-) must NOT be given at the beginning of the prefix name.
862: The first character of all runtime options is AUTOMATICALLY the hyphen.
864: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
865: @*/
866: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
867: {
868: PetscFunctionBegin;
870: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
871: PetscFunctionReturn(PETSC_SUCCESS);
872: }
874: /*@
875: MatGetOptionsPrefix - Gets the prefix used for searching for all
876: matrix options in the database.
878: Not Collective
880: Input Parameter:
881: . A - the matrix
883: Output Parameter:
884: . prefix - pointer to the prefix string used
886: Level: advanced
888: Fortran Note:
889: The user should pass in a string `prefix` of
890: sufficient length to hold the prefix.
892: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
893: @*/
894: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
895: {
896: PetscFunctionBegin;
898: PetscAssertPointer(prefix, 2);
899: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
900: PetscFunctionReturn(PETSC_SUCCESS);
901: }
903: /*@
904: MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`
906: Not Collective
908: Input Parameter:
909: . A - the matrix
911: Output Parameter:
912: . state - the object state
914: Level: advanced
916: Note:
917: Object state is an integer which gets increased every time
918: the object is changed. By saving and later querying the object state
919: one can determine whether information about the object is still current.
921: See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.
923: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
924: @*/
925: PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
926: {
927: PetscFunctionBegin;
929: PetscAssertPointer(state, 2);
930: PetscCall(PetscObjectStateGet((PetscObject)A, state));
931: PetscFunctionReturn(PETSC_SUCCESS);
932: }
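/*
  Usage sketch (illustrative only, not part of matrix.c): compare a saved state against the current
  one to decide whether cached data derived from the matrix is still valid. The function name is
  hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatGetStateUsageSketch(Mat A, PetscObjectState *saved, PetscBool *changed)
{
  PetscObjectState current;

  PetscFunctionBegin;
  PetscCall(MatGetState(A, &current));
  *changed = (PetscBool)(current != *saved);
  *saved   = current;
  PetscFunctionReturn(PETSC_SUCCESS);
}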
934: /*@
935: MatResetPreallocation - Reset the matrix to use the original nonzero pattern provided by the user.
937: Collective
939: Input Parameter:
940: . A - the matrix
942: Level: beginner
944: Notes:
945: The allocated memory will be shrunk after calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
947: Users can reset the preallocation to access the original memory.
949: Currently only supported for `MATAIJ` matrices.
951: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
952: @*/
953: PetscErrorCode MatResetPreallocation(Mat A)
954: {
955: PetscFunctionBegin;
958: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset preallocation after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
959: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
960: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
961: PetscFunctionReturn(PETSC_SUCCESS);
962: }
964: /*@
965: MatResetHash - Reset the matrix so that it will use a hash table for the next round of `MatSetValues()` and `MatAssemblyBegin()`/`MatAssemblyEnd()`.
967: Collective
969: Input Parameter:
970: . A - the matrix
972: Level: intermediate
974: Notes:
975: The matrix will again delete the hash table data structures after following calls to `MatAssemblyBegin()`/`MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
977: Currently only supported for `MATAIJ` matrices.
979: .seealso: [](ch_matrices), `Mat`, `MatResetPreallocation()`
980: @*/
981: PetscErrorCode MatResetHash(Mat A)
982: {
983: PetscFunctionBegin;
986: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset to hash state after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
987: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
988: PetscUseMethod(A, "MatResetHash_C", (Mat), (A));
989: /* These flags are used to determine whether certain setups occur */
990: A->was_assembled = PETSC_FALSE;
991: A->assembled = PETSC_FALSE;
992: /* Log that the state of this object has changed; this will help guarantee that preconditioners get re-setup */
993: PetscCall(PetscObjectStateIncrease((PetscObject)A));
994: PetscFunctionReturn(PETSC_SUCCESS);
995: }
997: /*@
998: MatSetUp - Sets up the internal matrix data structures for later use.
1000: Collective
1002: Input Parameter:
1003: . A - the matrix
1005: Level: intermediate
1007: Notes:
1008: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
1009: setting values in the matrix.
1011: This routine is called internally by other matrix functions when needed, so it rarely needs to be called by users.
1013: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
1014: @*/
1015: PetscErrorCode MatSetUp(Mat A)
1016: {
1017: PetscFunctionBegin;
1019: if (!((PetscObject)A)->type_name) {
1020: PetscMPIInt size;
1022: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1023: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
1024: }
1025: if (!A->preallocated) PetscTryTypeMethod(A, setup);
1026: PetscCall(PetscLayoutSetUp(A->rmap));
1027: PetscCall(PetscLayoutSetUp(A->cmap));
1028: A->preallocated = PETSC_TRUE;
1029: PetscFunctionReturn(PETSC_SUCCESS);
1030: }
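/*
  Usage sketch (illustrative only, not part of matrix.c): the typical create/set-values sequence in
  which MatSetUp() is called when no preallocation information is supplied. The function name and
  matrix contents are hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatSetUpUsageSketch(MPI_Comm comm, PetscInt n, Mat *newmat)
{
  Mat      A;
  PetscInt i, rstart, rend;

  PetscFunctionBegin;
  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A)); /* no preallocation given, so the first assembly uses the efficient default path */
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValue(A, i, i, 2.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  *newmat = A;
  PetscFunctionReturn(PETSC_SUCCESS);
}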
1032: #if defined(PETSC_HAVE_SAWS)
1033: #include <petscviewersaws.h>
1034: #endif
1036: /*
1037: If thread safety is on, extraneous matrices may be printed
1039: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
1040: */
1041: #if !defined(PETSC_HAVE_THREADSAFETY)
1042: static PetscInt insidematview = 0;
1043: #endif
1045: /*@
1046: MatViewFromOptions - View properties of the matrix based on options set in the options database
1048: Collective
1050: Input Parameters:
1051: + A - the matrix
1052: . obj - optional additional object that provides the options prefix to use
1053: - name - command line option
1055: Options Database Key:
1056: . -mat_view [viewertype]:... - the viewer and its options
1058: Level: intermediate
1060: Note:
1061: .vb
1062: If no value is provided ascii:stdout is used
1063: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
1064: for example ascii::ascii_info prints just the information about the object not all details
1065: unless :append is given filename opens in write mode, overwriting what was already there
1066: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
1067: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
1068: socket[:port] defaults to the standard output port
1069: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
1070: .ve
1072: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1073: @*/
1074: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
1075: {
1076: PetscFunctionBegin;
1078: #if !defined(PETSC_HAVE_THREADSAFETY)
1079: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
1080: #endif
1081: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
1082: PetscFunctionReturn(PETSC_SUCCESS);
1083: }
1085: /*@
1086: MatView - display information about a matrix in a variety of ways
1088: Collective on viewer
1090: Input Parameters:
1091: + mat - the matrix
1092: - viewer - visualization context
1094: Options Database Keys:
1095: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1096: . -mat_view ::ascii_info_detail - Prints more detailed info
1097: . -mat_view - Prints matrix in ASCII format
1098: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1099: . -mat_view draw - Draws the nonzero structure of the matrix, using `MatView()` and `PetscDrawOpenX()`.
1100: . -display <name> - Sets display name (default is host)
1101: . -draw_pause <sec> - Sets number of seconds to pause after display
1102: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1103: . -viewer_socket_machine <machine> - -
1104: . -viewer_socket_port <port> - -
1105: . -mat_view binary - save matrix to file in binary format
1106: - -viewer_binary_filename <name> - -
1108: Level: beginner
1110: Notes:
1111: The available visualization contexts include
1112: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1113: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1114: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1115: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1117: The user can open alternative visualization contexts with
1118: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1119: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a
1120: specified file; corresponding input uses `MatLoad()`
1121: . `PetscViewerDrawOpen()` - Outputs nonzero matrix structure to
1122: an X window display
1123: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer.
1124: Currently only the `MATSEQDENSE` and `MATAIJ`
1125: matrix types support the Socket viewer.
1127: The user can call `PetscViewerPushFormat()` to specify the output
1128: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1129: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1130: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1131: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1132: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1133: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse
1134: format common among all matrix types
1135: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific
1136: format (which is in many cases the same as the default)
1137: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix
1138: size and structure (not the matrix entries)
1139: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about
1140: the matrix structure (still not vector or matrix entries)
1142: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1143: for larger matrices the program will seemingly hang and take hours, so one should use the binary format instead.
1145: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1147: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1148: viewer is used.
1150: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1151: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1153: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1154: and then use the following mouse functions.
1155: .vb
1156: left mouse: zoom in
1157: middle mouse: zoom out
1158: right mouse: continue with the simulation
1159: .ve
1161: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1162: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1163: @*/
1164: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1165: {
1166: PetscInt rows, cols, rbs, cbs;
1167: PetscBool isascii, isstring, issaws;
1168: PetscViewerFormat format;
1169: PetscMPIInt size;
1171: PetscFunctionBegin;
1174: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1177: PetscCall(PetscViewerGetFormat(viewer, &format));
1178: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1179: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1181: #if !defined(PETSC_HAVE_THREADSAFETY)
1182: insidematview++;
1183: #endif
1184: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1185: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1186: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1187: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1189: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1190: if (isascii) {
1191: if (!mat->preallocated) {
1192: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1193: #if !defined(PETSC_HAVE_THREADSAFETY)
1194: insidematview--;
1195: #endif
1196: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1197: PetscFunctionReturn(PETSC_SUCCESS);
1198: }
1199: if (!mat->assembled) {
1200: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1201: #if !defined(PETSC_HAVE_THREADSAFETY)
1202: insidematview--;
1203: #endif
1204: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1205: PetscFunctionReturn(PETSC_SUCCESS);
1206: }
1207: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1208: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1209: MatNullSpace nullsp, transnullsp;
1211: PetscCall(PetscViewerASCIIPushTab(viewer));
1212: PetscCall(MatGetSize(mat, &rows, &cols));
1213: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1214: if (rbs != 1 || cbs != 1) {
1215: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1216: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1217: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1218: if (mat->factortype) {
1219: MatSolverType solver;
1220: PetscCall(MatFactorGetSolverType(mat, &solver));
1221: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1222: }
1223: if (mat->ops->getinfo) {
1224: MatInfo info;
1225: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1226: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1227: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1228: }
1229: PetscCall(MatGetNullSpace(mat, &nullsp));
1230: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1231: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1232: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1233: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1234: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1235: PetscCall(PetscViewerASCIIPushTab(viewer));
1236: PetscCall(MatProductView(mat, viewer));
1237: PetscCall(PetscViewerASCIIPopTab(viewer));
1238: if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1239: IS tmp;
1241: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
1242: PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1243: PetscCall(PetscViewerASCIIPushTab(viewer));
1244: PetscCall(ISView(tmp, viewer));
1245: PetscCall(PetscViewerASCIIPopTab(viewer));
1246: PetscCall(ISDestroy(&tmp));
1247: }
1248: }
1249: } else if (issaws) {
1250: #if defined(PETSC_HAVE_SAWS)
1251: PetscMPIInt rank;
1253: PetscCall(PetscObjectName((PetscObject)mat));
1254: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1255: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1256: #endif
1257: } else if (isstring) {
1258: const char *type;
1259: PetscCall(MatGetType(mat, &type));
1260: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1261: PetscTryTypeMethod(mat, view, viewer);
1262: }
1263: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1264: PetscCall(PetscViewerASCIIPushTab(viewer));
1265: PetscUseTypeMethod(mat, viewnative, viewer);
1266: PetscCall(PetscViewerASCIIPopTab(viewer));
1267: } else if (mat->ops->view) {
1268: PetscCall(PetscViewerASCIIPushTab(viewer));
1269: PetscUseTypeMethod(mat, view, viewer);
1270: PetscCall(PetscViewerASCIIPopTab(viewer));
1271: }
1272: if (isascii) {
1273: PetscCall(PetscViewerGetFormat(viewer, &format));
1274: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1275: }
1276: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1277: #if !defined(PETSC_HAVE_THREADSAFETY)
1278: insidematview--;
1279: #endif
1280: PetscFunctionReturn(PETSC_SUCCESS);
1281: }
1283: #if defined(PETSC_USE_DEBUG)
1284: #include <../src/sys/totalview/tv_data_display.h>
1285: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1286: {
1287: TV_add_row("Local rows", "int", &mat->rmap->n);
1288: TV_add_row("Local columns", "int", &mat->cmap->n);
1289: TV_add_row("Global rows", "int", &mat->rmap->N);
1290: TV_add_row("Global columns", "int", &mat->cmap->N);
1291: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1292: return TV_format_OK;
1293: }
1294: #endif
1296: /*@
1297: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1298: with `MatView()`. The matrix format is determined from the options database.
1299: Generates a parallel MPI matrix if the communicator has more than one
1300: processor. The default matrix type is `MATAIJ`.
1302: Collective
1304: Input Parameters:
1305: + mat - the newly loaded matrix; this needs to have been created with `MatCreate()`
1306: or some related function before a call to `MatLoad()`
1307: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1309: Options Database Key:
1310: . -matload_block_size <bs> - set block size
1312: Level: beginner
1314: Notes:
1315: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1316: `Mat` before calling this routine if you wish to set it from the options database.
1318: `MatLoad()` automatically loads into the options database any options
1319: given in the file filename.info where filename is the name of the file
1320: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1321: file will be ignored if you use the -viewer_binary_skip_info option.
1323: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1324: sets the default matrix type AIJ and sets the local and global sizes.
1325: If type and/or size is already set, then the same are used.
1327: In parallel, each processor can load a subset of rows (or the
1328: entire matrix). This routine is especially useful when a large
1329: matrix is stored on disk and only part of it is desired on each
1330: processor. For example, a parallel solver may access only some of
1331: the rows from each processor. The algorithm used here reads
1332: relatively small blocks of data rather than reading the entire
1333: matrix and then subsetting it.
1335: The viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1336: Such a viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1337: or the sequence like
1338: .vb
1339: `PetscViewer` v;
1340: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1341: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1342: `PetscViewerSetFromOptions`(v);
1343: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1344: `PetscViewerFileSetName`(v,"datafile");
1345: .ve
1346: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1347: $ -viewer_type {binary, hdf5}
1349: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1350: and src/mat/tutorials/ex10.c with the second approach.
1352: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1353: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1354: Multiple objects, both matrices and vectors, can be stored within the same file.
1355: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1357: Most users should not need to know the details of the binary storage
1358: format, since `MatLoad()` and `MatView()` completely hide these details.
1359: But for anyone who is interested, the standard binary matrix storage
1360: format is
1362: .vb
1363: PetscInt MAT_FILE_CLASSID
1364: PetscInt number of rows
1365: PetscInt number of columns
1366: PetscInt total number of nonzeros
1367: PetscInt *number nonzeros in each row
1368: PetscInt *column indices of all nonzeros (starting index is zero)
1369: PetscScalar *values of all nonzeros
1370: .ve
1371: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1372: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1373: case will not fit in a (32-bit) `PetscInt`, the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1375: PETSc automatically does the byte swapping for
1376: machines that store the bytes reversed. Thus if you write your own binary
1377: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1378: and `PetscBinaryWrite()` to see how this may be done.
1380: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1381: Each processor's chunk is loaded independently by its owning MPI process.
1382: Multiple objects, both matrices and vectors, can be stored within the same file.
1383: They are looked up by their PetscObject name.
1385: As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1386: by default the same structure and naming of the AIJ arrays and column count
1387: within the HDF5 file. This means that a MAT file saved with the -v7.3 flag, e.g.
1388: $ save example.mat A b -v7.3
1389: can be directly read by this routine (see Reference 1 for details).
1391: Depending on your MATLAB version, this format might be the default;
1392: otherwise you can set it as the default in Preferences.
1394: Unless the -nocompression flag is used to save the file in MATLAB,
1395: PETSc must be configured with the ZLIB package.
1397: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1399: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1401: The corresponding `MatView()` is not yet implemented.
1403: The loaded matrix is actually a transpose of the original one in MATLAB,
1404: unless you push the `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1405: With this format, the matrix is automatically transposed by PETSc,
1406: unless the matrix is marked as SPD or symmetric
1407: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1409: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1411: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1412: @*/
1413: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1414: {
1415: PetscBool flg;
1417: PetscFunctionBegin;
1421: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1423: flg = PETSC_FALSE;
1424: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1425: if (flg) {
1426: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1427: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1428: }
1429: flg = PETSC_FALSE;
1430: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1431: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1433: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1434: PetscUseTypeMethod(mat, load, viewer);
1435: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1436: PetscFunctionReturn(PETSC_SUCCESS);
1437: }
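/*
  Usage sketch (illustrative only, not part of matrix.c): load a matrix from a PETSc binary file.
  The filename argument and function name are hypothetical.
*/
PETSC_UNUSED static PetscErrorCode MatLoadUsageSketch(MPI_Comm comm, const char filename[], Mat *newmat)
{
  PetscViewer viewer;

  PetscFunctionBegin;
  PetscCall(PetscViewerBinaryOpen(comm, filename, FILE_MODE_READ, &viewer));
  PetscCall(MatCreate(comm, newmat));
  PetscCall(MatSetFromOptions(*newmat)); /* optionally select the type with -mat_type */
  PetscCall(MatLoad(*newmat, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}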
1439: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1440: {
1441: Mat_Redundant *redund = *redundant;
1443: PetscFunctionBegin;
1444: if (redund) {
1445: if (redund->matseq) { /* via MatCreateSubMatrices() */
1446: PetscCall(ISDestroy(&redund->isrow));
1447: PetscCall(ISDestroy(&redund->iscol));
1448: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1449: } else {
1450: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1451: PetscCall(PetscFree(redund->sbuf_j));
1452: PetscCall(PetscFree(redund->sbuf_a));
1453: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1454: PetscCall(PetscFree(redund->rbuf_j[i]));
1455: PetscCall(PetscFree(redund->rbuf_a[i]));
1456: }
1457: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1458: }
1460: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1461: PetscCall(PetscFree(redund));
1462: }
1463: PetscFunctionReturn(PETSC_SUCCESS);
1464: }
1466: /*@
1467: MatDestroy - Frees space taken by a matrix.
1469: Collective
1471: Input Parameter:
1472: . A - the matrix
1474: Level: beginner
1476: Developer Note:
1477: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1478: `MatDestroySubMatrices()`. Thus any changes made here must also be made in those routines.
1479: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1480: if changes are needed here.
1482: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1483: @*/
1484: PetscErrorCode MatDestroy(Mat *A)
1485: {
1486: PetscFunctionBegin;
1487: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1489: if (--((PetscObject)*A)->refct > 0) {
1490: *A = NULL;
1491: PetscFunctionReturn(PETSC_SUCCESS);
1492: }
1494: /* if memory was published with SAWs then destroy it */
1495: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1496: PetscTryTypeMethod(*A, destroy);
1498: PetscCall(PetscFree((*A)->factorprefix));
1499: PetscCall(PetscFree((*A)->defaultvectype));
1500: PetscCall(PetscFree((*A)->defaultrandtype));
1501: PetscCall(PetscFree((*A)->bsizes));
1502: PetscCall(PetscFree((*A)->solvertype));
1503: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1504: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1505: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1506: PetscCall(MatProductClear(*A));
1507: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1508: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1509: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1510: PetscCall(MatDestroy(&(*A)->schur));
1511: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1512: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1513: PetscCall(PetscHeaderDestroy(A));
1514: PetscFunctionReturn(PETSC_SUCCESS);
1515: }
1517: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1518: /*@
1519: MatSetValues - Inserts or adds a block of values into a matrix.
1520: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1521: MUST be called after all calls to `MatSetValues()` have been completed.
1523: Not Collective
1525: Input Parameters:
1526: + mat - the matrix
1527: . v - a logically two-dimensional array of values
1528: . m - the number of rows
1529: . idxm - the global indices of the rows
1530: . n - the number of columns
1531: . idxn - the global indices of the columns
1532: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1534: Level: beginner
1536: Notes:
1537: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1539: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1540: options cannot be mixed without intervening calls to the assembly
1541: routines.
1543: `MatSetValues()` uses 0-based row and column numbers in Fortran
1544: as well as in C.
1546: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1547: simply ignored. This allows easily inserting element stiffness matrices
1548: with homogeneous Dirichlet boundary conditions that you don't want represented
1549: in the matrix.
1551: Efficiency Alert:
1552: The routine `MatSetValuesBlocked()` may offer much better efficiency
1553: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1555: Fortran Notes:
1556: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1557: .vb
1558: MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES)
1559: .ve
1561: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1563: Developer Note:
1564: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1565: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
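   Example:
   For illustration only, a minimal sketch that inserts a 2 by 2 block of values; the matrix A is assumed to have been created, sized, and preallocated, and the indices and values are arbitrary.
.vb
  PetscInt    rows[2] = {0, 1}, cols[2] = {0, 1};
  PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0};  /* row-oriented: vals[i*n + j] goes to entry (rows[i], cols[j]) */
  PetscCall(MatSetValues(A, 2, rows, 2, cols, vals, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve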
1567: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1568: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1569: @*/
1570: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1571: {
1572: PetscFunctionBeginHot;
1575: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1576: PetscAssertPointer(idxm, 3);
1577: PetscAssertPointer(idxn, 5);
1578: MatCheckPreallocated(mat, 1);
1580: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1581: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1583: if (PetscDefined(USE_DEBUG)) {
1584: PetscInt i, j;
1586: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1587: if (v) {
1588: for (i = 0; i < m; i++) {
1589: for (j = 0; j < n; j++) {
1590: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1591: #if defined(PETSC_USE_COMPLEX)
1592: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1593: #else
1594: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1595: #endif
1596: }
1597: }
1598: }
1599: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1600: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1601: }
1603: if (mat->assembled) {
1604: mat->was_assembled = PETSC_TRUE;
1605: mat->assembled = PETSC_FALSE;
1606: }
1607: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1608: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1609: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1610: PetscFunctionReturn(PETSC_SUCCESS);
1611: }
1613: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1614: /*@
1615: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns.
1616: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1617: MUST be called after all calls to `MatSetValues()` have been completed.
1619: Not Collective
1621: Input Parameters:
1622: + mat - the matrix
1623: . v - a logically two-dimensional array of values
1624: . ism - the rows to provide
1625: . isn - the columns to provide
1626: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1628: Level: beginner
1630: Notes:
1631: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1633: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1634: options cannot be mixed without intervening calls to the assembly
1635: routines.
1637: `MatSetValues()` uses 0-based row and column numbers in Fortran
1638: as well as in C.
1640: Negative indices may be passed in `ism` and `isn`; these rows and columns are
1641: simply ignored. This allows easily inserting element stiffness matrices
1642: with homogeneous Dirichlet boundary conditions that you don't want represented
1643: in the matrix.
1645: Efficiency Alert:
1646: The routine `MatSetValuesBlocked()` may offer much better efficiency
1647: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1649: This is currently not optimized for any particular `ISType`
1651: Developer Note:
1652: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1653: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
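   Example:
   For illustration only, a minimal sketch using two small index sets; the matrix A is assumed to have been created, sized, and preallocated, and the indices and values are arbitrary.
.vb
  IS          ism, isn;
  PetscInt    rows[2] = {0, 1}, cols[2] = {2, 3};
  PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0};
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, rows, PETSC_COPY_VALUES, &ism));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, cols, PETSC_COPY_VALUES, &isn));
  PetscCall(MatSetValuesIS(A, ism, isn, vals, ADD_VALUES));
  PetscCall(ISDestroy(&ism));
  PetscCall(ISDestroy(&isn));
.ve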
1655: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1656: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1657: @*/
1658: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1659: {
1660: PetscInt m, n;
1661: const PetscInt *rows, *cols;
1663: PetscFunctionBeginHot;
1665: PetscCall(ISGetIndices(ism, &rows));
1666: PetscCall(ISGetIndices(isn, &cols));
1667: PetscCall(ISGetLocalSize(ism, &m));
1668: PetscCall(ISGetLocalSize(isn, &n));
1669: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1670: PetscCall(ISRestoreIndices(ism, &rows));
1671: PetscCall(ISRestoreIndices(isn, &cols));
1672: PetscFunctionReturn(PETSC_SUCCESS);
1673: }
1675: /*@
1676: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1677: values into a matrix
1679: Not Collective
1681: Input Parameters:
1682: + mat - the matrix
1683: . row - the (block) row to set
1684: - v - a logically two-dimensional array of values
1686: Level: intermediate
1688: Notes:
1689: The values, `v`, are column-oriented (for the block version) and sorted
1691: All the nonzero values in `row` must be provided
1693: The matrix must have previously had its column indices set, likely by having been assembled.
1695: `row` must belong to this MPI process
1697: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1698: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1699: @*/
1700: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1701: {
1702: PetscInt globalrow;
1704: PetscFunctionBegin;
1707: PetscAssertPointer(v, 3);
1708: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1709: PetscCall(MatSetValuesRow(mat, globalrow, v));
1710: PetscFunctionReturn(PETSC_SUCCESS);
1711: }
1713: /*@
1714: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1715: values into a matrix
1717: Not Collective
1719: Input Parameters:
1720: + mat - the matrix
1721: . row - the (block) row to set
1722: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1724: Level: advanced
1726: Notes:
1727: The values, `v`, are column-oriented for the block version.
1729: All the nonzeros in `row` must be provided
1731: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED; usually `MatSetValues()` is used instead.
1733: `row` must belong to this process
1735: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1736: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1737: @*/
1738: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1739: {
1740: PetscFunctionBeginHot;
1743: MatCheckPreallocated(mat, 1);
1744: PetscAssertPointer(v, 3);
1745: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1746: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1747: mat->insertmode = INSERT_VALUES;
1749: if (mat->assembled) {
1750: mat->was_assembled = PETSC_TRUE;
1751: mat->assembled = PETSC_FALSE;
1752: }
1753: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1754: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1755: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1756: PetscFunctionReturn(PETSC_SUCCESS);
1757: }
1759: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1760: /*@
1761: MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1762: using structured grid indexing
1764: Not Collective
1766: Input Parameters:
1767: + mat - the matrix
1768: . m - number of rows being entered
1769: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1770: . n - number of columns being entered
1771: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1772: . v - a logically two-dimensional array of values
1773: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1775: Level: beginner
1777: Notes:
1778: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1780: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1781: options cannot be mixed without intervening calls to the assembly
1782: routines.
1784: The grid coordinates are across the entire grid, not just the local portion
1786: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1787: as well as in C.
1789: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1791: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1792: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1794: The columns and rows in the stencil passed in MUST be contained within the
1795: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1796: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1797: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1798: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1800: For periodic boundary conditions use negative indices for values to the left of index 0 (these are
1801: obtained by wrapping values from the right edge). For values to the right of the last entry use that index plus one,
1802: etc., to obtain values wrapped from the left edge. This works only with the
1803: `DM_BOUNDARY_PERIODIC` boundary type.
1805: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1806: a single value per point) you can skip filling those indices.
1808: Inspired by the structured grid interface to the HYPRE package
1809: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1811: Efficiency Alert:
1812: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1813: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1815: Fortran Note:
1816: `idxm` and `idxn` should be declared as
1817: $ MatStencil idxm(4,m),idxn(4,n)
1818: and the values inserted using
1819: .vb
1820: idxm(MatStencil_i,1) = i
1821: idxm(MatStencil_j,1) = j
1822: idxm(MatStencil_k,1) = k
1823: idxm(MatStencil_c,1) = c
1824: etc
1825: .ve
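   Example:
   For illustration only, a minimal sketch inserting one row of a standard 2d 5-point stencil at an assumed interior grid point (i, j); the matrix mat is assumed to come from `DMCreateMatrix()` on a 2d `DMDA` with one degree of freedom per point, so the k and c stencil fields may be left unset.
.vb
  MatStencil  row, cols[5];
  PetscScalar vals[5] = {-1.0, -1.0, 4.0, -1.0, -1.0};
  row.i = i;         row.j = j;
  cols[0].i = i;     cols[0].j = j - 1;
  cols[1].i = i - 1; cols[1].j = j;
  cols[2].i = i;     cols[2].j = j;
  cols[3].i = i + 1; cols[3].j = j;
  cols[4].i = i;     cols[4].j = j + 1;
  PetscCall(MatSetValuesStencil(mat, 1, &row, 5, cols, vals, INSERT_VALUES));
.ve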
1827: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1828: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1829: @*/
1830: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1831: {
1832: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1833: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1834: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1836: PetscFunctionBegin;
1837: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1840: PetscAssertPointer(idxm, 3);
1841: PetscAssertPointer(idxn, 5);
1843: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1844: jdxm = buf;
1845: jdxn = buf + m;
1846: } else {
1847: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1848: jdxm = bufm;
1849: jdxn = bufn;
1850: }
1851: for (i = 0; i < m; i++) {
1852: for (j = 0; j < 3 - sdim; j++) dxm++;
1853: tmp = *dxm++ - starts[0];
1854: for (j = 0; j < dim - 1; j++) {
1855: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1856: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1857: }
1858: if (mat->stencil.noc) dxm++;
1859: jdxm[i] = tmp;
1860: }
1861: for (i = 0; i < n; i++) {
1862: for (j = 0; j < 3 - sdim; j++) dxn++;
1863: tmp = *dxn++ - starts[0];
1864: for (j = 0; j < dim - 1; j++) {
1865: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1866: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1867: }
1868: if (mat->stencil.noc) dxn++;
1869: jdxn[i] = tmp;
1870: }
1871: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1872: PetscCall(PetscFree2(bufm, bufn));
1873: PetscFunctionReturn(PETSC_SUCCESS);
1874: }
1876: /*@
1877: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1878: using structured grid indexing
1880: Not Collective
1882: Input Parameters:
1883: + mat - the matrix
1884: . m - number of rows being entered
1885: . idxm - grid coordinates for matrix rows being entered
1886: . n - number of columns being entered
1887: . idxn - grid coordinates for matrix columns being entered
1888: . v - a logically two-dimensional array of values
1889: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1891: Level: beginner
1893: Notes:
1894: By default the values, `v`, are row-oriented and unsorted.
1895: See `MatSetOption()` for other options.
1897: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1898: options cannot be mixed without intervening calls to the assembly
1899: routines.
1901: The grid coordinates are across the entire grid, not just the local portion
1903: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1904: as well as in C.
1906: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1908: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1909: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1911: The columns and rows in the stencil passed in MUST be contained within the
1912: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1913: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1914: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1915: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1917: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1918: simply ignored. This allows easily inserting element stiffness matrices
1919: with homogeneous Dirichlet boundary conditions that you don't want represented
1920: in the matrix.
1922: Inspired by the structured grid interface to the HYPRE package
1923: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1925: Fortran Note:
1926: `idxm` and `idxn` should be declared as
1927: $ MatStencil idxm(4,m),idxn(4,n)
1928: and the values inserted using
1929: .vb
1930: idxm(MatStencil_i,1) = i
1931: idxm(MatStencil_j,1) = j
1932: idxm(MatStencil_k,1) = k
1933: etc
1934: .ve
1936: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1937: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1938: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1939: @*/
1940: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1941: {
1942: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1943: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1944: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1946: PetscFunctionBegin;
1947: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1950: PetscAssertPointer(idxm, 3);
1951: PetscAssertPointer(idxn, 5);
1952: PetscAssertPointer(v, 6);
1954: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1955: jdxm = buf;
1956: jdxn = buf + m;
1957: } else {
1958: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1959: jdxm = bufm;
1960: jdxn = bufn;
1961: }
1962: for (i = 0; i < m; i++) {
1963: for (j = 0; j < 3 - sdim; j++) dxm++;
1964: tmp = *dxm++ - starts[0];
1965: for (j = 0; j < sdim - 1; j++) {
1966: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1967: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1968: }
1969: dxm++;
1970: jdxm[i] = tmp;
1971: }
1972: for (i = 0; i < n; i++) {
1973: for (j = 0; j < 3 - sdim; j++) dxn++;
1974: tmp = *dxn++ - starts[0];
1975: for (j = 0; j < sdim - 1; j++) {
1976: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1977: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1978: }
1979: dxn++;
1980: jdxn[i] = tmp;
1981: }
1982: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1983: PetscCall(PetscFree2(bufm, bufn));
1984: PetscFunctionReturn(PETSC_SUCCESS);
1985: }
1987: /*@
1988: MatSetStencil - Sets the grid information for setting values into a matrix via
1989: `MatSetValuesStencil()`
1991: Not Collective
1993: Input Parameters:
1994: + mat - the matrix
1995: . dim - dimension of the grid 1, 2, or 3
1996: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1997: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1998: - dof - number of degrees of freedom per node
2000: Level: beginner
2002: Notes:
2003: Inspired by the structured grid interface to the HYPRE package
2004: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
2006: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
2007: user.
2009: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
2010: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
2011: @*/
2012: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
2013: {
2014: PetscFunctionBegin;
2016: PetscAssertPointer(dims, 3);
2017: PetscAssertPointer(starts, 4);
2019: mat->stencil.dim = dim + (dof > 1);
2020: for (PetscInt i = 0; i < dim; i++) {
2021: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
2022: mat->stencil.starts[i] = starts[dim - i - 1];
2023: }
2024: mat->stencil.dims[dim] = dof;
2025: mat->stencil.starts[dim] = 0;
2026: mat->stencil.noc = (PetscBool)(dof == 1);
2027: PetscFunctionReturn(PETSC_SUCCESS);
2028: }
2030: /*@
2031: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
2033: Not Collective
2035: Input Parameters:
2036: + mat - the matrix
2037: . v - a logically two-dimensional array of values
2038: . m - the number of block rows
2039: . idxm - the global block indices
2040: . n - the number of block columns
2041: . idxn - the global block indices
2042: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
2044: Level: intermediate
2046: Notes:
2047: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
2048: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
2050: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
2051: NOT the total number of rows/columns; for example, if the block size is 2 and
2052: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
2053: The values in `idxm` would be 1 and 2; that is, the first index for each block divided by
2054: the block size.
2056: You must call `MatSetBlockSize()` when constructing this matrix (before
2057: preallocating it).
2059: By default the values, `v`, are row-oriented, so the layout of
2060: `v` is the same as for `MatSetValues()`. See `MatSetOption()` for other options.
2062: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
2063: options cannot be mixed without intervening calls to the assembly
2064: routines.
2066: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
2067: as well as in C.
2069: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
2070: simply ignored. This allows easily inserting element stiffness matrices
2071: with homogeneous Dirichlet boundary conditions that you don't want represented
2072: in the matrix.
2074: Each time an entry is set within a sparse matrix via `MatSetValues()`,
2075: internal searching must be done to determine where to place the
2076: data in the matrix storage space. By instead inserting blocks of
2077: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2078: reduced.
2080: Example:
2081: .vb
2082: Suppose m=n=2 and block size(bs) = 2 The array is
2084: 1 2 | 3 4
2085: 5 6 | 7 8
2086: - - - | - - -
2087: 9 10 | 11 12
2088: 13 14 | 15 16
2090: v[] should be passed in like
2091: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
2093: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2094: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2095: .ve
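   For illustration only, a call matching the layout above might look as follows; the matrix mat is assumed to have block size 2 and to be preallocated, and block indices 1 and 2 address rows/columns 2 through 5 as described.
.vb
  PetscInt    idxm[2] = {1, 2}, idxn[2] = {1, 2};
  PetscScalar v[16]   = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; /* row-oriented */
  PetscCall(MatSetValuesBlocked(mat, 2, idxm, 2, idxn, v, INSERT_VALUES));
.ve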
2097: Fortran Notes:
2098: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
2099: .vb
2100: MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES)
2101: .ve
2103: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2105: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2106: @*/
2107: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
2108: {
2109: PetscFunctionBeginHot;
2112: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2113: PetscAssertPointer(idxm, 3);
2114: PetscAssertPointer(idxn, 5);
2115: MatCheckPreallocated(mat, 1);
2116: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2117: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2118: if (PetscDefined(USE_DEBUG)) {
2119: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2120: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2121: }
2122: if (PetscDefined(USE_DEBUG)) {
2123: PetscInt rbs, cbs, M, N, i;
2124: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2125: PetscCall(MatGetSize(mat, &M, &N));
2126: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
2127: for (i = 0; i < n; i++)
2128: PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
2129: }
2130: if (mat->assembled) {
2131: mat->was_assembled = PETSC_TRUE;
2132: mat->assembled = PETSC_FALSE;
2133: }
2134: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2135: if (mat->ops->setvaluesblocked) {
2136: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2137: } else {
2138: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2139: PetscInt i, j, bs, cbs;
2141: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2142: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2143: iidxm = buf;
2144: iidxn = buf + m * bs;
2145: } else {
2146: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2147: iidxm = bufr;
2148: iidxn = bufc;
2149: }
2150: for (i = 0; i < m; i++) {
2151: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2152: }
2153: if (m != n || bs != cbs || idxm != idxn) {
2154: for (i = 0; i < n; i++) {
2155: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2156: }
2157: } else iidxn = iidxm;
2158: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2159: PetscCall(PetscFree2(bufr, bufc));
2160: }
2161: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2162: PetscFunctionReturn(PETSC_SUCCESS);
2163: }
2165: /*@
2166: MatGetValues - Gets a block of local values from a matrix.
2168: Not Collective; can only return values that are owned by the given process
2170: Input Parameters:
2171: + mat - the matrix
2172: . v - a logically two-dimensional array for storing the values
2173: . m - the number of rows
2174: . idxm - the global indices of the rows
2175: . n - the number of columns
2176: - idxn - the global indices of the columns
2178: Level: advanced
2180: Notes:
2181: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2182: The values, `v`, are then returned in a row-oriented format,
2183: analogous to that used by default in `MatSetValues()`.
2185: `MatGetValues()` uses 0-based row and column numbers in
2186: Fortran as well as in C.
2188: `MatGetValues()` requires that the matrix has been assembled
2189: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2190: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2191: without intermediate matrix assembly.
2193: Negative row or column indices will be ignored and those locations in `v` will be
2194: left unchanged.
2196: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2197: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2198: from `MatGetOwnershipRange`(mat,&rstart,&rend).
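   Example:
   For illustration only, a minimal sketch retrieving a 2 by 2 block of locally owned entries; the matrix mat is assumed to be assembled, and the indices are arbitrary but must lie in this process's row range.
.vb
  PetscInt    rows[2] = {0, 1}, cols[2] = {0, 1};
  PetscScalar vals[4]; /* filled row-oriented: vals[i*n + j] = entry (rows[i], cols[j]) */
  PetscCall(MatGetValues(mat, 2, rows, 2, cols, vals));
.ve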
2200: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2201: @*/
2202: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2203: {
2204: PetscFunctionBegin;
2207: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2208: PetscAssertPointer(idxm, 3);
2209: PetscAssertPointer(idxn, 5);
2210: PetscAssertPointer(v, 6);
2211: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2212: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2213: MatCheckPreallocated(mat, 1);
2215: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2216: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2217: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2218: PetscFunctionReturn(PETSC_SUCCESS);
2219: }
2221: /*@
2222: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2223: defined previously by `MatSetLocalToGlobalMapping()`
2225: Not Collective
2227: Input Parameters:
2228: + mat - the matrix
2229: . nrow - number of rows
2230: . irow - the row local indices
2231: . ncol - number of columns
2232: - icol - the column local indices
2234: Output Parameter:
2235: . y - a logically two-dimensional array of values
2237: Level: advanced
2239: Notes:
2240: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2242: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2243: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2244: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2245: with `MatSetLocalToGlobalMapping()`.
2247: Developer Note:
2248: This is labelled with C so does not automatically generate Fortran stubs and interfaces
2249: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2251: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2252: `MatSetValuesLocal()`, `MatGetValues()`
2253: @*/
2254: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2255: {
2256: PetscFunctionBeginHot;
2259: MatCheckPreallocated(mat, 1);
2260: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2261: PetscAssertPointer(irow, 3);
2262: PetscAssertPointer(icol, 5);
2263: if (PetscDefined(USE_DEBUG)) {
2264: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2265: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2266: }
2267: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2268: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2269: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2270: else {
2271: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2272: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2273: irowm = buf;
2274: icolm = buf + nrow;
2275: } else {
2276: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2277: irowm = bufr;
2278: icolm = bufc;
2279: }
2280: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2281: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2282: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2283: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2284: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2285: PetscCall(PetscFree2(bufr, bufc));
2286: }
2287: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2288: PetscFunctionReturn(PETSC_SUCCESS);
2289: }
2291: /*@
2292: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2293: the same size. Currently, this can only be called once and creates the given matrix.
2295: Not Collective
2297: Input Parameters:
2298: + mat - the matrix
2299: . nb - the number of blocks
2300: . bs - the number of rows (and columns) in each block
2301: . rows - a concatenation of the rows for each block
2302: - v - a concatenation of logically two-dimensional arrays of values
2304: Level: advanced
2306: Notes:
2307: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2309: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
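   Example:
   For illustration only, a minimal sketch adding two 2 by 2 blocks; the matrix mat, the rows, and the values are assumed placeholders.
.vb
  PetscInt    rows[4] = {0, 1, 2, 3};             /* rows of block 0, then block 1 */
  PetscScalar v[8]    = {1, 2, 3, 4, 5, 6, 7, 8}; /* block 0 values, then block 1 values, row-oriented */
  PetscCall(MatSetValuesBatch(mat, 2, 2, rows, v));
.ve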
2311: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2312: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2313: @*/
2314: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2315: {
2316: PetscFunctionBegin;
2319: PetscAssertPointer(rows, 4);
2320: PetscAssertPointer(v, 5);
2321: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2323: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2324: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2325: else {
2326: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2327: }
2328: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2329: PetscFunctionReturn(PETSC_SUCCESS);
2330: }
2332: /*@
2333: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2334: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2335: using a local (per-processor) numbering.
2337: Not Collective
2339: Input Parameters:
2340: + x - the matrix
2341: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2342: - cmapping - column mapping
2344: Level: intermediate
2346: Note:
2347: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
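   Example:
   For illustration only, a minimal sketch that builds a mapping from an assumed array of global indices globals of length nlocal and uses it for both rows and columns; the matrix takes its own reference, so the local handle can be destroyed afterwards.
.vb
  ISLocalToGlobalMapping ltog;
  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD, 1, nlocal, globals, PETSC_COPY_VALUES, &ltog));
  PetscCall(MatSetLocalToGlobalMapping(A, ltog, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
.ve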
2349: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2350: @*/
2351: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2352: {
2353: PetscFunctionBegin;
2358: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2359: else {
2360: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2361: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2362: }
2363: PetscFunctionReturn(PETSC_SUCCESS);
2364: }
2366: /*@
2367: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2369: Not Collective
2371: Input Parameter:
2372: . A - the matrix
2374: Output Parameters:
2375: + rmapping - row mapping
2376: - cmapping - column mapping
2378: Level: advanced
2380: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2381: @*/
2382: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2383: {
2384: PetscFunctionBegin;
2387: if (rmapping) {
2388: PetscAssertPointer(rmapping, 2);
2389: *rmapping = A->rmap->mapping;
2390: }
2391: if (cmapping) {
2392: PetscAssertPointer(cmapping, 3);
2393: *cmapping = A->cmap->mapping;
2394: }
2395: PetscFunctionReturn(PETSC_SUCCESS);
2396: }
2398: /*@
2399: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2401: Logically Collective
2403: Input Parameters:
2404: + A - the matrix
2405: . rmap - row layout
2406: - cmap - column layout
2408: Level: advanced
2410: Note:
2411: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2413: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2414: @*/
2415: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2416: {
2417: PetscFunctionBegin;
2419: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2420: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2421: PetscFunctionReturn(PETSC_SUCCESS);
2422: }
2424: /*@
2425: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2427: Not Collective
2429: Input Parameter:
2430: . A - the matrix
2432: Output Parameters:
2433: + rmap - row layout
2434: - cmap - column layout
2436: Level: advanced
2438: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2439: @*/
2440: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2441: {
2442: PetscFunctionBegin;
2445: if (rmap) {
2446: PetscAssertPointer(rmap, 2);
2447: *rmap = A->rmap;
2448: }
2449: if (cmap) {
2450: PetscAssertPointer(cmap, 3);
2451: *cmap = A->cmap;
2452: }
2453: PetscFunctionReturn(PETSC_SUCCESS);
2454: }
2456: /*@
2457: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2458: using a local numbering of the rows and columns.
2460: Not Collective
2462: Input Parameters:
2463: + mat - the matrix
2464: . nrow - number of rows
2465: . irow - the row local indices
2466: . ncol - number of columns
2467: . icol - the column local indices
2468: . y - a logically two-dimensional array of values
2469: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2471: Level: intermediate
2473: Notes:
2474: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2476: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2477: options cannot be mixed without intervening calls to the assembly
2478: routines.
2480: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2481: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
2483: Fortran Notes:
2484: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2485: .vb
2486: MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES)
2487: .ve
2489: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2491: Developer Note:
2492: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2493: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
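   Example:
   For illustration only, a minimal sketch inserting a 2 by 2 element contribution using local indices; the matrix mat is assumed to have its local-to-global mapping already set, and the indices and values are arbitrary.
.vb
  PetscInt    lrows[2] = {0, 1}, lcols[2] = {0, 1}; /* local numbering */
  PetscScalar vals[4]  = {1.0, 2.0, 3.0, 4.0};
  PetscCall(MatSetValuesLocal(mat, 2, lrows, 2, lcols, vals, ADD_VALUES));
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve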
2495: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2496: `MatGetValuesLocal()`
2497: @*/
2498: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2499: {
2500: PetscFunctionBeginHot;
2503: MatCheckPreallocated(mat, 1);
2504: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2505: PetscAssertPointer(irow, 3);
2506: PetscAssertPointer(icol, 5);
2507: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2508: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2509: if (PetscDefined(USE_DEBUG)) {
2510: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2511: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2512: }
2514: if (mat->assembled) {
2515: mat->was_assembled = PETSC_TRUE;
2516: mat->assembled = PETSC_FALSE;
2517: }
2518: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2519: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2520: else {
2521: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2522: const PetscInt *irowm, *icolm;
2524: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2525: bufr = buf;
2526: bufc = buf + nrow;
2527: irowm = bufr;
2528: icolm = bufc;
2529: } else {
2530: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2531: irowm = bufr;
2532: icolm = bufc;
2533: }
2534: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2535: else irowm = irow;
2536: if (mat->cmap->mapping) {
2537: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2538: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2539: } else icolm = irowm;
2540: } else icolm = icol;
2541: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2542: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2543: }
2544: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2545: PetscFunctionReturn(PETSC_SUCCESS);
2546: }
2548: /*@
2549: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2550: using a local ordering of the nodes a block at a time.
2552: Not Collective
2554: Input Parameters:
2555: + mat - the matrix
2556: . nrow - number of rows
2557: . irow - the row local indices
2558: . ncol - number of columns
2559: . icol - the column local indices
2560: . y - a logically two-dimensional array of values
2561: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2563: Level: intermediate
2565: Notes:
2566: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2567: before using this routine.
2569: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2570: options cannot be mixed without intervening calls to the assembly
2571: routines.
2573: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2574: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2576: Fortran Notes:
2577: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2578: .vb
2579: MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES)
2580: .ve
2582: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2584: Developer Note:
2585: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2586: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2588: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2589: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2590: @*/
2591: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2592: {
2593: PetscFunctionBeginHot;
2596: MatCheckPreallocated(mat, 1);
2597: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2598: PetscAssertPointer(irow, 3);
2599: PetscAssertPointer(icol, 5);
2600: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2601: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2602: if (PetscDefined(USE_DEBUG)) {
2603: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2604: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2605: }
2607: if (mat->assembled) {
2608: mat->was_assembled = PETSC_TRUE;
2609: mat->assembled = PETSC_FALSE;
2610: }
2611: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2612: PetscInt irbs, rbs;
2613: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2614: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2615: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2616: }
2617: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2618: PetscInt icbs, cbs;
2619: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2620: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2621: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2622: }
2623: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2624: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2625: else {
2626: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2627: const PetscInt *irowm, *icolm;
2629: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2630: bufr = buf;
2631: bufc = buf + nrow;
2632: irowm = bufr;
2633: icolm = bufc;
2634: } else {
2635: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2636: irowm = bufr;
2637: icolm = bufc;
2638: }
2639: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2640: else irowm = irow;
2641: if (mat->cmap->mapping) {
2642: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2643: PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2644: } else icolm = irowm;
2645: } else icolm = icol;
2646: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2647: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2648: }
2649: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2650: PetscFunctionReturn(PETSC_SUCCESS);
2651: }
2653: /*@
2654: MatMultDiagonalBlock - Computes the matrix-vector product, $y = Dx$, where `D` is defined by the inode or block structure of the diagonal
2656: Collective
2658: Input Parameters:
2659: + mat - the matrix
2660: - x - the vector to be multiplied
2662: Output Parameter:
2663: . y - the result
2665: Level: developer
2667: Note:
2668: The vectors `x` and `y` cannot be the same. I.e., one cannot
2669: call `MatMultDiagonalBlock`(A,y,y).
2671: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2672: @*/
2673: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2674: {
2675: PetscFunctionBegin;
2681: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2682: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2683: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2684: MatCheckPreallocated(mat, 1);
2686: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2687: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2688: PetscFunctionReturn(PETSC_SUCCESS);
2689: }
2691: /*@
2692: MatMult - Computes the matrix-vector product, $y = Ax$.
2694: Neighbor-wise Collective
2696: Input Parameters:
2697: + mat - the matrix
2698: - x - the vector to be multiplied
2700: Output Parameter:
2701: . y - the result
2703: Level: beginner
2705: Note:
2706: The vectors `x` and `y` cannot be the same. I.e., one cannot
2707: call `MatMult`(A,y,y).
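   Example:
   For illustration only, a minimal sketch multiplying an assumed, assembled matrix A by a vector; `MatCreateVecs()` provides vectors with layouts compatible with the matrix.
.vb
  Vec x, y;
  PetscCall(MatCreateVecs(A, &x, &y)); /* x is a right vector (columns of A), y a left vector (rows of A) */
  PetscCall(VecSet(x, 1.0));
  PetscCall(MatMult(A, x, y));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
.ve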
2709: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2710: @*/
2711: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2712: {
2713: PetscFunctionBegin;
2717: VecCheckAssembled(x);
2719: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2720: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2721: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2722: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2723: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2724: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2725: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2726: PetscCall(VecSetErrorIfLocked(y, 3));
2727: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2728: MatCheckPreallocated(mat, 1);
2730: PetscCall(VecLockReadPush(x));
2731: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2732: PetscUseTypeMethod(mat, mult, x, y);
2733: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2734: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2735: PetscCall(VecLockReadPop(x));
2736: PetscFunctionReturn(PETSC_SUCCESS);
2737: }
2739: /*@
2740: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2742: Neighbor-wise Collective
2744: Input Parameters:
2745: + mat - the matrix
2746: - x - the vector to be multiplied
2748: Output Parameter:
2749: . y - the result
2751: Level: beginner
2753: Notes:
2754: The vectors `x` and `y` cannot be the same. I.e., one cannot
2755: call `MatMultTranspose`(A,y,y).
2757: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2758: use `MatMultHermitianTranspose()`
2760: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2761: @*/
2762: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2763: {
2764: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2766: PetscFunctionBegin;
2770: VecCheckAssembled(x);
2773: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2774: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2775: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2776: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2777: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2778: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2779: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2780: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2781: MatCheckPreallocated(mat, 1);
2783: if (!mat->ops->multtranspose) {
2784: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2785: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2786: } else op = mat->ops->multtranspose;
2787: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2788: PetscCall(VecLockReadPush(x));
2789: PetscCall((*op)(mat, x, y));
2790: PetscCall(VecLockReadPop(x));
2791: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2792: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2793: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2794: PetscFunctionReturn(PETSC_SUCCESS);
2795: }
2797: /*@
2798: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2800: Neighbor-wise Collective
2802: Input Parameters:
2803: + mat - the matrix
2804: - x - the vector to be multiplied
2806: Output Parameter:
2807: . y - the result
2809: Level: beginner
2811: Notes:
2812: The vectors `x` and `y` cannot be the same. I.e., one cannot
2813: call `MatMultHermitianTranspose`(A,y,y).
2815: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2817: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2819: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2820: @*/
2821: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2822: {
2823: PetscFunctionBegin;
2829: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2830: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2831: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2832: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2833: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2834: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2835: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2836: MatCheckPreallocated(mat, 1);
2838: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2839: #if defined(PETSC_USE_COMPLEX)
2840: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2841: PetscCall(VecLockReadPush(x));
2842: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2843: else PetscUseTypeMethod(mat, mult, x, y);
2844: PetscCall(VecLockReadPop(x));
2845: } else {
2846: Vec w;
2847: PetscCall(VecDuplicate(x, &w));
2848: PetscCall(VecCopy(x, w));
2849: PetscCall(VecConjugate(w));
2850: PetscCall(MatMultTranspose(mat, w, y));
2851: PetscCall(VecDestroy(&w));
2852: PetscCall(VecConjugate(y));
2853: }
2854: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2855: #else
2856: PetscCall(MatMultTranspose(mat, x, y));
2857: #endif
2858: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2859: PetscFunctionReturn(PETSC_SUCCESS);
2860: }
2862: /*@
2863: MatMultAdd - Computes $v3 = v2 + A * v1$.
2865: Neighbor-wise Collective
2867: Input Parameters:
2868: + mat - the matrix
2869: . v1 - the vector to be multiplied by `mat`
2870: - v2 - the vector to be added to the result
2872: Output Parameter:
2873: . v3 - the result
2875: Level: beginner
2877: Note:
2878: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2879: call `MatMultAdd`(A,v1,v2,v1).
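Example:
A minimal usage sketch computing v3 = v2 + A*v1, assuming `A` is an assembled `Mat`; `PetscCall()` error checking is omitted for brevity.
.vb
   Vec v1, v2, v3;

   MatCreateVecs(A, &v1, &v2); /* v1 is compatible with the columns of A, v2 with the rows */
   VecDuplicate(v2, &v3);
   VecSet(v1, 1.0);
   VecSet(v2, 2.0);
   MatMultAdd(A, v1, v2, v3);  /* v3 = v2 + A*v1 */
.ve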
2881: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2882: @*/
2883: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2884: {
2885: PetscFunctionBegin;
2892: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2893: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2894: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2895: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2896: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2897: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2898: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2899: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2900: MatCheckPreallocated(mat, 1);
2902: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2903: PetscCall(VecLockReadPush(v1));
2904: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2905: PetscCall(VecLockReadPop(v1));
2906: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2907: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2908: PetscFunctionReturn(PETSC_SUCCESS);
2909: }
2911: /*@
2912: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2914: Neighbor-wise Collective
2916: Input Parameters:
2917: + mat - the matrix
2918: . v1 - the vector to be multiplied by the transpose of the matrix
2919: - v2 - the vector to be added to the result
2921: Output Parameter:
2922: . v3 - the result
2924: Level: beginner
2926: Note:
2927: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2928: call `MatMultTransposeAdd`(A,v1,v2,v1).
2930: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2931: @*/
2932: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2933: {
2934: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2936: PetscFunctionBegin;
2943: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2944: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2945: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2946: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2947: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2948: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2949: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2950: MatCheckPreallocated(mat, 1);
2952: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2953: PetscCall(VecLockReadPush(v1));
2954: PetscCall((*op)(mat, v1, v2, v3));
2955: PetscCall(VecLockReadPop(v1));
2956: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2957: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2958: PetscFunctionReturn(PETSC_SUCCESS);
2959: }
2961: /*@
2962: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2964: Neighbor-wise Collective
2966: Input Parameters:
2967: + mat - the matrix
2968: . v1 - the vector to be multiplied by the Hermitian transpose
2969: - v2 - the vector to be added to the result
2971: Output Parameter:
2972: . v3 - the result
2974: Level: beginner
2976: Note:
2977: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2978: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2980: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2981: @*/
2982: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2983: {
2984: PetscFunctionBegin;
2991: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2992: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2993: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2994: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2995: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2996: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2997: MatCheckPreallocated(mat, 1);
2999: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
3000: PetscCall(VecLockReadPush(v1));
3001: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
3002: else {
3003: Vec w, z;
3004: PetscCall(VecDuplicate(v1, &w));
3005: PetscCall(VecCopy(v1, w));
3006: PetscCall(VecConjugate(w));
3007: PetscCall(VecDuplicate(v3, &z));
3008: PetscCall(MatMultTranspose(mat, w, z));
3009: PetscCall(VecDestroy(&w));
3010: PetscCall(VecConjugate(z));
3011: if (v2 != v3) {
3012: PetscCall(VecWAXPY(v3, 1.0, v2, z));
3013: } else {
3014: PetscCall(VecAXPY(v3, 1.0, z));
3015: }
3016: PetscCall(VecDestroy(&z));
3017: }
3018: PetscCall(VecLockReadPop(v1));
3019: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
3020: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
3021: PetscFunctionReturn(PETSC_SUCCESS);
3022: }
3024: /*@
3025: MatGetFactorType - gets the factorization type of a matrix
3027: Not Collective
3029: Input Parameter:
3030: . mat - the matrix
3032: Output Parameter:
3033: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3035: Level: intermediate
3037: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3038: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3039: @*/
3040: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
3041: {
3042: PetscFunctionBegin;
3045: PetscAssertPointer(t, 2);
3046: *t = mat->factortype;
3047: PetscFunctionReturn(PETSC_SUCCESS);
3048: }
3050: /*@
3051: MatSetFactorType - sets the factorization type of a matrix
3053: Logically Collective
3055: Input Parameters:
3056: + mat - the matrix
3057: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3059: Level: intermediate
3061: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3062: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3063: @*/
3064: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
3065: {
3066: PetscFunctionBegin;
3069: mat->factortype = t;
3070: PetscFunctionReturn(PETSC_SUCCESS);
3071: }
3073: /*@
3074: MatGetInfo - Returns information about matrix storage (number of
3075: nonzeros, memory, etc.).
3077: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3079: Input Parameters:
3080: + mat - the matrix
3081: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3083: Output Parameter:
3084: . info - matrix information context
3086: Options Database Key:
3087: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3089: Level: intermediate
3091: Notes:
3092: The `MatInfo` context contains a variety of matrix data, including
3093: number of nonzeros allocated and used, number of mallocs during
3094: matrix assembly, etc. Additional information for factored matrices
3095: is provided (such as the fill ratio, number of mallocs during
3096: factorization, etc.).
3098: Example:
3099: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3100: data within the `MatInfo` context. For example,
3101: .vb
3102: MatInfo info;
3103: Mat A;
3104: double mal, nz_a, nz_u;
3106: MatGetInfo(A, MAT_LOCAL, &info);
3107: mal = info.mallocs;
3108: nz_a = info.nz_allocated;
3109: .ve
3111: Fortran Note:
3112: Declare info as a `MatInfo` array of dimension `MAT_INFO_SIZE`, and then extract the parameters
3113: of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h
3114: for a complete list of parameter names.
3115: .vb
3116: MatInfo info(MAT_INFO_SIZE)
3117: double precision mal, nz_a
3118: Mat A
3119: integer ierr
3121: call MatGetInfo(A, MAT_LOCAL, info, ierr)
3122: mal = info(MAT_INFO_MALLOCS)
3123: nz_a = info(MAT_INFO_NZ_ALLOCATED)
3124: .ve
3126: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3127: @*/
3128: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3129: {
3130: PetscFunctionBegin;
3133: PetscAssertPointer(info, 3);
3134: MatCheckPreallocated(mat, 1);
3135: PetscUseTypeMethod(mat, getinfo, flag, info);
3136: PetscFunctionReturn(PETSC_SUCCESS);
3137: }
3139: /*
3140: This is used by external packages where it is not easy to get the info from the actual
3141: matrix factorization.
3142: */
3143: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3144: {
3145: PetscFunctionBegin;
3146: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3147: PetscFunctionReturn(PETSC_SUCCESS);
3148: }
3150: /*@
3151: MatLUFactor - Performs in-place LU factorization of a matrix.
3153: Collective
3155: Input Parameters:
3156: + mat - the matrix
3157: . row - row permutation
3158: . col - column permutation
3159: - info - options for factorization, includes
3160: .vb
3161: fill - expected fill as ratio of original fill.
3162: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3163: Run with the option -info to determine an optimal value to use
3164: .ve
3166: Level: developer
3168: Notes:
3169: Most users should employ the `KSP` interface for linear solvers
3170: instead of working directly with matrix algebra routines such as this.
3171: See, e.g., `KSPCreate()`.
3173: This changes the state of the matrix to a factored matrix; it cannot be used
3174: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3176: This is truly in-place only for dense matrices; when not using `KSP`, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and
3177: `MatLUFactorNumeric()`.
3179: Developer Note:
3180: The Fortran interface is not autogenerated as the
3181: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3183: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3184: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3185: @*/
3186: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3187: {
3188: MatFactorInfo tinfo;
3190: PetscFunctionBegin;
3194: if (info) PetscAssertPointer(info, 4);
3196: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3197: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3198: MatCheckPreallocated(mat, 1);
3199: if (!info) {
3200: PetscCall(MatFactorInfoInitialize(&tinfo));
3201: info = &tinfo;
3202: }
3204: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3205: PetscUseTypeMethod(mat, lufactor, row, col, info);
3206: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3207: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3208: PetscFunctionReturn(PETSC_SUCCESS);
3209: }
3211: /*@
3212: MatILUFactor - Performs in-place ILU factorization of a matrix.
3214: Collective
3216: Input Parameters:
3217: + mat - the matrix
3218: . row - row permutation
3219: . col - column permutation
3220: - info - structure containing
3221: .vb
3222: levels - number of levels of fill.
3223: expected fill - as ratio of original fill.
3224: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3225: missing diagonal entries)
3226: .ve
3228: Level: developer
3230: Notes:
3231: Most users should employ the `KSP` interface for linear solvers
3232: instead of working directly with matrix algebra routines such as this.
3233: See, e.g., `KSPCreate()`.
3235: This is generally in-place only when the level of fill is zero; otherwise, new space is allocated
3236: to store the factored matrix and the previous memory is freed. When not using `KSP`, the preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and
3237: `MatILUFactorNumeric()`.
3239: Developer Note:
3240: The Fortran interface is not autogenerated as the
3241: interface definition cannot be generated correctly [due to MatFactorInfo]
3243: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3244: @*/
3245: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3246: {
3247: PetscFunctionBegin;
3251: PetscAssertPointer(info, 4);
3253: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3254: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3255: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3256: MatCheckPreallocated(mat, 1);
3258: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3259: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3260: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3261: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3262: PetscFunctionReturn(PETSC_SUCCESS);
3263: }
3265: /*@
3266: MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
3267: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3269: Collective
3271: Input Parameters:
3272: + fact - the factor matrix obtained with `MatGetFactor()`
3273: . mat - the matrix
3274: . row - the row permutation
3275: . col - the column permutation
3276: - info - options for factorization, includes
3277: .vb
3278: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3279: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3280: .ve
3282: Level: developer
3284: Notes:
3285: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3287: Most users should employ the simplified `KSP` interface for linear solvers
3288: instead of working directly with matrix algebra routines such as this.
3289: See, e.g., `KSPCreate()`.
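Example:
A sketch of the typical sequence when using the factorization directly, assuming `A` is an assembled square `Mat` and `b`, `x` are compatible `Vec`s; `PetscCall()` error checking is omitted for brevity.
.vb
   Mat           F;
   IS            row, col;
   MatFactorInfo info;

   MatGetOrdering(A, MATORDERINGNATURAL, &row, &col);
   MatFactorInfoInitialize(&info);
   MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F);
   MatLUFactorSymbolic(F, A, row, col, &info);
   MatLUFactorNumeric(F, A, &info);
   MatSolve(F, b, x);
   ISDestroy(&row);
   ISDestroy(&col);
   MatDestroy(&F);
.ve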
3291: Developer Note:
3292: The Fortran interface is not autogenerated as the
3293: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3295: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3296: @*/
3297: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3298: {
3299: MatFactorInfo tinfo;
3301: PetscFunctionBegin;
3306: if (info) PetscAssertPointer(info, 5);
3309: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3310: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3311: MatCheckPreallocated(mat, 2);
3312: if (!info) {
3313: PetscCall(MatFactorInfoInitialize(&tinfo));
3314: info = &tinfo;
3315: }
3317: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3318: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3319: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3320: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3321: PetscFunctionReturn(PETSC_SUCCESS);
3322: }
3324: /*@
3325: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3326: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3328: Collective
3330: Input Parameters:
3331: + fact - the factor matrix obtained with `MatGetFactor()`
3332: . mat - the matrix
3333: - info - options for factorization
3335: Level: developer
3337: Notes:
3338: See `MatLUFactor()` for in-place factorization. See
3339: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3341: Most users should employ the `KSP` interface for linear solvers
3342: instead of working directly with matrix algebra routines such as this.
3343: See, e.g., `KSPCreate()`.
3345: Developer Note:
3346: The Fortran interface is not autogenerated as the
3347: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3349: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3350: @*/
3351: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3352: {
3353: MatFactorInfo tinfo;
3355: PetscFunctionBegin;
3360: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3361: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3362: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3364: MatCheckPreallocated(mat, 2);
3365: if (!info) {
3366: PetscCall(MatFactorInfoInitialize(&tinfo));
3367: info = &tinfo;
3368: }
3370: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3371: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3372: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3373: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3374: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3375: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3376: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3377: PetscFunctionReturn(PETSC_SUCCESS);
3378: }
3380: /*@
3381: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3382: symmetric matrix.
3384: Collective
3386: Input Parameters:
3387: + mat - the matrix
3388: . perm - row and column permutations
3389: - info - expected fill as ratio of original fill
3391: Level: developer
3393: Notes:
3394: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3395: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3397: Most users should employ the `KSP` interface for linear solvers
3398: instead of working directly with matrix algebra routines such as this.
3399: See, e.g., `KSPCreate()`.
3401: Developer Note:
3402: The Fortran interface is not autogenerated as the
3403: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3405: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`,
3406: `MatGetOrdering()`
3407: @*/
3408: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3409: {
3410: MatFactorInfo tinfo;
3412: PetscFunctionBegin;
3415: if (info) PetscAssertPointer(info, 3);
3417: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3418: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3419: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3420: MatCheckPreallocated(mat, 1);
3421: if (!info) {
3422: PetscCall(MatFactorInfoInitialize(&tinfo));
3423: info = &tinfo;
3424: }
3426: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3427: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3428: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3429: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3430: PetscFunctionReturn(PETSC_SUCCESS);
3431: }
3433: /*@
3434: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3435: of a symmetric matrix.
3437: Collective
3439: Input Parameters:
3440: + fact - the factor matrix obtained with `MatGetFactor()`
3441: . mat - the matrix
3442: . perm - row and column permutations
3443: - info - options for factorization, includes
3444: .vb
3445: fill - expected fill as ratio of original fill.
3446: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3447: Run with the option -info to determine an optimal value to use
3448: .ve
3450: Level: developer
3452: Notes:
3453: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3454: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3456: Most users should employ the `KSP` interface for linear solvers
3457: instead of working directly with matrix algebra routines such as this.
3458: See, e.g., `KSPCreate()`.
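Example:
A sketch of the typical sequence, assuming `A` is an assembled symmetric `Mat` and `b`, `x` are compatible `Vec`s; `PetscCall()` error checking is omitted for brevity.
.vb
   Mat           F;
   IS            rperm, cperm;
   MatFactorInfo info;

   MatGetOrdering(A, MATORDERINGNATURAL, &rperm, &cperm);
   MatFactorInfoInitialize(&info);
   MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F);
   MatCholeskyFactorSymbolic(F, A, rperm, &info);
   MatCholeskyFactorNumeric(F, A, &info);
   MatSolve(F, b, x);
   ISDestroy(&rperm);
   ISDestroy(&cperm);
   MatDestroy(&F);
.ve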
3460: Developer Note:
3461: The Fortran interface is not autogenerated as the
3462: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3464: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`,
3465: `MatGetOrdering()`
3466: @*/
3467: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3468: {
3469: MatFactorInfo tinfo;
3471: PetscFunctionBegin;
3475: if (info) PetscAssertPointer(info, 4);
3478: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3479: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3480: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3481: MatCheckPreallocated(mat, 2);
3482: if (!info) {
3483: PetscCall(MatFactorInfoInitialize(&tinfo));
3484: info = &tinfo;
3485: }
3487: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3488: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3489: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3490: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3491: PetscFunctionReturn(PETSC_SUCCESS);
3492: }
3494: /*@
3495: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3496: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3497: `MatCholeskyFactorSymbolic()`.
3499: Collective
3501: Input Parameters:
3502: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3503: . mat - the initial matrix that is to be factored
3504: - info - options for factorization
3506: Level: developer
3508: Note:
3509: Most users should employ the `KSP` interface for linear solvers
3510: instead of working directly with matrix algebra routines such as this.
3511: See, e.g., `KSPCreate()`.
3513: Developer Note:
3514: The Fortran interface is not autogenerated as the
3515: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3517: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3518: @*/
3519: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3520: {
3521: MatFactorInfo tinfo;
3523: PetscFunctionBegin;
3528: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3529: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3530: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3531: MatCheckPreallocated(mat, 2);
3532: if (!info) {
3533: PetscCall(MatFactorInfoInitialize(&tinfo));
3534: info = &tinfo;
3535: }
3537: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3538: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3539: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3540: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3541: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3542: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3543: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3544: PetscFunctionReturn(PETSC_SUCCESS);
3545: }
3547: /*@
3548: MatQRFactor - Performs in-place QR factorization of a matrix.
3550: Collective
3552: Input Parameters:
3553: + mat - the matrix
3554: . col - column permutation
3555: - info - options for factorization, includes
3556: .vb
3557: fill - expected fill as ratio of original fill.
3558: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3559: Run with the option -info to determine an optimal value to use
3560: .ve
3562: Level: developer
3564: Notes:
3565: Most users should employ the `KSP` interface for linear solvers
3566: instead of working directly with matrix algebra routines such as this.
3567: See, e.g., `KSPCreate()`.
3569: This changes the state of the matrix to a factored matrix; it cannot be used
3570: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3572: Developer Note:
3573: The Fortran interface is not autogenerated as the
3574: interface definition cannot be generated correctly [due to MatFactorInfo]
3576: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3577: `MatSetUnfactored()`
3578: @*/
3579: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3580: {
3581: PetscFunctionBegin;
3584: if (info) PetscAssertPointer(info, 3);
3586: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3587: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3588: MatCheckPreallocated(mat, 1);
3589: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3590: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3591: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3592: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3593: PetscFunctionReturn(PETSC_SUCCESS);
3594: }
3596: /*@
3597: MatQRFactorSymbolic - Performs symbolic QR factorization of a matrix.
3598: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3600: Collective
3602: Input Parameters:
3603: + fact - the factor matrix obtained with `MatGetFactor()`
3604: . mat - the matrix
3605: . col - column permutation
3606: - info - options for factorization, includes
3607: .vb
3608: fill - expected fill as ratio of original fill.
3609: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3610: Run with the option -info to determine an optimal value to use
3611: .ve
3613: Level: developer
3615: Note:
3616: Most users should employ the `KSP` interface for linear solvers
3617: instead of working directly with matrix algebra routines such as this.
3618: See, e.g., `KSPCreate()`.
3620: Developer Note:
3621: The Fortran interface is not autogenerated as the
3622: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3624: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3625: @*/
3626: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3627: {
3628: MatFactorInfo tinfo;
3630: PetscFunctionBegin;
3634: if (info) PetscAssertPointer(info, 4);
3637: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3638: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3639: MatCheckPreallocated(mat, 2);
3640: if (!info) {
3641: PetscCall(MatFactorInfoInitialize(&tinfo));
3642: info = &tinfo;
3643: }
3645: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3646: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3647: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3648: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3649: PetscFunctionReturn(PETSC_SUCCESS);
3650: }
3652: /*@
3653: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3654: Call this routine after first calling `MatGetFactor()` and `MatQRFactorSymbolic()`.
3656: Collective
3658: Input Parameters:
3659: + fact - the factor matrix obtained with `MatGetFactor()`
3660: . mat - the matrix
3661: - info - options for factorization
3663: Level: developer
3665: Notes:
3666: See `MatQRFactor()` for in-place factorization.
3668: Most users should employ the `KSP` interface for linear solvers
3669: instead of working directly with matrix algebra routines such as this.
3670: See, e.g., `KSPCreate()`.
3672: Developer Note:
3673: The Fortran interface is not autogenerated as the
3674: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3676: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3677: @*/
3678: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3679: {
3680: MatFactorInfo tinfo;
3682: PetscFunctionBegin;
3687: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3688: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3689: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3691: MatCheckPreallocated(mat, 2);
3692: if (!info) {
3693: PetscCall(MatFactorInfoInitialize(&tinfo));
3694: info = &tinfo;
3695: }
3697: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3698: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3699: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3700: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3701: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3702: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3703: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3704: PetscFunctionReturn(PETSC_SUCCESS);
3705: }
3707: /*@
3708: MatSolve - Solves $A x = b$, given a factored matrix.
3710: Neighbor-wise Collective
3712: Input Parameters:
3713: + mat - the factored matrix
3714: - b - the right-hand-side vector
3716: Output Parameter:
3717: . x - the result vector
3719: Level: developer
3721: Notes:
3722: The vectors `b` and `x` cannot be the same. I.e., one cannot
3723: call `MatSolve`(A,x,x).
3725: Most users should employ the `KSP` interface for linear solvers
3726: instead of working directly with matrix algebra routines such as this.
3727: See, e.g., `KSPCreate()`.
3729: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3730: @*/
3731: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3732: {
3733: PetscFunctionBegin;
3738: PetscCheckSameComm(mat, 1, b, 2);
3739: PetscCheckSameComm(mat, 1, x, 3);
3740: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3741: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3742: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3743: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3744: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3745: MatCheckPreallocated(mat, 1);
3747: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3748: PetscCall(VecFlag(x, mat->factorerrortype));
3749: if (mat->factorerrortype) {
3750: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3751: } else PetscUseTypeMethod(mat, solve, b, x);
3752: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3753: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3754: PetscFunctionReturn(PETSC_SUCCESS);
3755: }
3757: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3758: {
3759: Vec b, x;
3760: PetscInt N, i;
3761: PetscErrorCode (*f)(Mat, Vec, Vec);
3762: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3764: PetscFunctionBegin;
3765: if (A->factorerrortype) {
3766: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3767: PetscCall(MatSetInf(X));
3768: PetscFunctionReturn(PETSC_SUCCESS);
3769: }
3770: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3771: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3772: PetscCall(MatBoundToCPU(A, &Abound));
3773: if (!Abound) {
3774: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3775: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3776: }
3777: #if PetscDefined(HAVE_CUDA)
3778: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3779: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3780: #elif PetscDefined(HAVE_HIP)
3781: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3782: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3783: #endif
3784: PetscCall(MatGetSize(B, NULL, &N));
3785: for (i = 0; i < N; i++) {
3786: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3787: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3788: PetscCall((*f)(A, b, x));
3789: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3790: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3791: }
3792: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3793: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3794: PetscFunctionReturn(PETSC_SUCCESS);
3795: }
3797: /*@
3798: MatMatSolve - Solves $A X = B$, given a factored matrix.
3800: Neighbor-wise Collective
3802: Input Parameters:
3803: + A - the factored matrix
3804: - B - the right-hand-side `MATDENSE` matrix (or sparse `MATAIJ` when using MUMPS)
3806: Output Parameter:
3807: . X - the result matrix (dense matrix)
3809: Level: developer
3811: Note:
3812: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3813: otherwise, `B` and `X` cannot be the same.
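Example:
A sketch solving for several right-hand sides at once, assuming `F` is a factored `Mat` (obtained with `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`) and `B` is a `MATDENSE` matrix whose columns are the right-hand sides; `PetscCall()` error checking is omitted for brevity.
.vb
   Mat X;

   MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X);
   MatMatSolve(F, B, X); /* each column of X solves A x = b for the corresponding column b of B */
   MatDestroy(&X);
.ve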
3815: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3816: @*/
3817: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3818: {
3819: PetscFunctionBegin;
3824: PetscCheckSameComm(A, 1, B, 2);
3825: PetscCheckSameComm(A, 1, X, 3);
3826: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3827: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3828: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3829: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3830: MatCheckPreallocated(A, 1);
3832: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3833: if (!A->ops->matsolve) {
3834: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3835: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3836: } else PetscUseTypeMethod(A, matsolve, B, X);
3837: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3838: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3839: PetscFunctionReturn(PETSC_SUCCESS);
3840: }
3842: /*@
3843: MatMatSolveTranspose - Solves $A^T X = B$, given a factored matrix.
3845: Neighbor-wise Collective
3847: Input Parameters:
3848: + A - the factored matrix
3849: - B - the right-hand-side matrix (`MATDENSE` matrix)
3851: Output Parameter:
3852: . X - the result matrix (dense matrix)
3854: Level: developer
3856: Note:
3857: The matrices `B` and `X` cannot be the same. I.e., one cannot
3858: call `MatMatSolveTranspose`(A,X,X).
3860: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3861: @*/
3862: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3863: {
3864: PetscFunctionBegin;
3869: PetscCheckSameComm(A, 1, B, 2);
3870: PetscCheckSameComm(A, 1, X, 3);
3871: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3872: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3873: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3874: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3875: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3876: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3877: MatCheckPreallocated(A, 1);
3879: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3880: if (!A->ops->matsolvetranspose) {
3881: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3882: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3883: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3884: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3885: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3886: PetscFunctionReturn(PETSC_SUCCESS);
3887: }
3889: /*@
3890: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3892: Neighbor-wise Collective
3894: Input Parameters:
3895: + A - the factored matrix
3896: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3898: Output Parameter:
3899: . X - the result matrix (dense matrix)
3901: Level: developer
3903: Note:
3904: For MUMPS, only a centralized sparse compressed column format on the host processor is supported for the right-hand side matrix. The user must create `Bt` in sparse compressed row
3905: format on the host processor and call `MatMatTransposeSolve()` to implement MUMPS' `MatMatSolve()`.
3907: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3908: @*/
3909: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3910: {
3911: PetscFunctionBegin;
3916: PetscCheckSameComm(A, 1, Bt, 2);
3917: PetscCheckSameComm(A, 1, X, 3);
3919: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3920: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3921: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3922: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as row number of the rhs matrix");
3923: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3924: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3925: MatCheckPreallocated(A, 1);
3927: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3928: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3929: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3930: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3931: PetscFunctionReturn(PETSC_SUCCESS);
3932: }
3934: /*@
3935: MatForwardSolve - Solves $L x = b$, given a factored matrix $A = LU$, or
3936: $U^T D^{1/2} x = b$, given a factored symmetric matrix $A = U^T D U$.
3938: Neighbor-wise Collective
3940: Input Parameters:
3941: + mat - the factored matrix
3942: - b - the right-hand-side vector
3944: Output Parameter:
3945: . x - the result vector
3947: Level: developer
3949: Notes:
3950: `MatSolve()` should be used for most applications, as it performs
3951: a forward solve followed by a backward solve.
3953: The vectors `b` and `x` cannot be the same, i.e., one cannot
3954: call `MatForwardSolve`(A,x,x).
3956: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3957: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3958: `MatForwardSolve()` solves $U^T*D y = b$, and
3959: `MatBackwardSolve()` solves $U x = y$.
3960: Thus they do not provide a symmetric preconditioner.
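Example:
A sketch applying the two triangular solves separately, assuming `F` is a factored `Mat` (for example from `MatLUFactorNumeric()`) and `b`, `y`, `x` are compatible `Vec`s; together the two calls are equivalent to a single `MatSolve()`. `PetscCall()` error checking is omitted for brevity.
.vb
   MatForwardSolve(F, b, y);  /* y = L^{-1} b */
   MatBackwardSolve(F, y, x); /* x = U^{-1} y */
.ve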
3962: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3963: @*/
3964: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3965: {
3966: PetscFunctionBegin;
3971: PetscCheckSameComm(mat, 1, b, 2);
3972: PetscCheckSameComm(mat, 1, x, 3);
3973: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3974: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3975: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3976: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3977: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3978: MatCheckPreallocated(mat, 1);
3980: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3981: PetscUseTypeMethod(mat, forwardsolve, b, x);
3982: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3983: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3984: PetscFunctionReturn(PETSC_SUCCESS);
3985: }
3987: /*@
3988: MatBackwardSolve - Solves $U x = b$, given a factored matrix $A = LU$, or
3989: $D^{1/2} U x = b$, given a factored symmetric matrix $A = U^T D U$.
3991: Neighbor-wise Collective
3993: Input Parameters:
3994: + mat - the factored matrix
3995: - b - the right-hand-side vector
3997: Output Parameter:
3998: . x - the result vector
4000: Level: developer
4002: Notes:
4003: `MatSolve()` should be used for most applications, as it performs
4004: a forward solve followed by a backward solve.
4006: The vectors `b` and `x` cannot be the same. I.e., one cannot
4007: call `MatBackwardSolve`(A,x,x).
4009: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
4010: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
4011: `MatForwardSolve()` solves $U^T*D y = b$, and
4012: `MatBackwardSolve()` solves $U x = y$.
4013: Thus they do not provide a symmetric preconditioner.
4015: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
4016: @*/
4017: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
4018: {
4019: PetscFunctionBegin;
4024: PetscCheckSameComm(mat, 1, b, 2);
4025: PetscCheckSameComm(mat, 1, x, 3);
4026: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4027: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4028: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4029: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4030: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4031: MatCheckPreallocated(mat, 1);
4033: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
4034: PetscUseTypeMethod(mat, backwardsolve, b, x);
4035: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
4036: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4037: PetscFunctionReturn(PETSC_SUCCESS);
4038: }
4040: /*@
4041: MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.
4043: Neighbor-wise Collective
4045: Input Parameters:
4046: + mat - the factored matrix
4047: . b - the right-hand-side vector
4048: - y - the vector to be added to
4050: Output Parameter:
4051: . x - the result vector
4053: Level: developer
4055: Note:
4056: The vectors `b` and `x` cannot be the same. I.e., one cannot
4057: call `MatSolveAdd`(A,x,y,x).
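Example:
A minimal sketch, assuming `F` is a factored `Mat` and `b`, `y`, `x` are compatible `Vec`s; `PetscCall()` error checking is omitted for brevity.
.vb
   MatSolveAdd(F, b, y, x); /* x = y + inv(A) b, where F holds the factors of A */
.ve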
4059: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
4060: @*/
4061: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
4062: {
4063: PetscScalar one = 1.0;
4064: Vec tmp;
4066: PetscFunctionBegin;
4072: PetscCheckSameComm(mat, 1, b, 2);
4073: PetscCheckSameComm(mat, 1, y, 3);
4074: PetscCheckSameComm(mat, 1, x, 4);
4075: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4076: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4077: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4078: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4079: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4080: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4081: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4082: MatCheckPreallocated(mat, 1);
4084: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
4085: PetscCall(VecFlag(x, mat->factorerrortype));
4086: if (mat->factorerrortype) {
4087: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4088: } else if (mat->ops->solveadd) {
4089: PetscUseTypeMethod(mat, solveadd, b, y, x);
4090: } else {
4091: /* do the solve then the add manually */
4092: if (x != y) {
4093: PetscCall(MatSolve(mat, b, x));
4094: PetscCall(VecAXPY(x, one, y));
4095: } else {
4096: PetscCall(VecDuplicate(x, &tmp));
4097: PetscCall(VecCopy(x, tmp));
4098: PetscCall(MatSolve(mat, b, x));
4099: PetscCall(VecAXPY(x, one, tmp));
4100: PetscCall(VecDestroy(&tmp));
4101: }
4102: }
4103: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4104: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4105: PetscFunctionReturn(PETSC_SUCCESS);
4106: }
4108: /*@
4109: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4111: Neighbor-wise Collective
4113: Input Parameters:
4114: + mat - the factored matrix
4115: - b - the right-hand-side vector
4117: Output Parameter:
4118: . x - the result vector
4120: Level: developer
4122: Notes:
4123: The vectors `b` and `x` cannot be the same. I.e., one cannot
4124: call `MatSolveTranspose`(A,x,x).
4126: Most users should employ the `KSP` interface for linear solvers
4127: instead of working directly with matrix algebra routines such as this.
4128: See, e.g., `KSPCreate()`.
4130: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4131: @*/
4132: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4133: {
4134: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4136: PetscFunctionBegin;
4141: PetscCheckSameComm(mat, 1, b, 2);
4142: PetscCheckSameComm(mat, 1, x, 3);
4143: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4144: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4145: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4146: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4147: MatCheckPreallocated(mat, 1);
4148: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4149: PetscCall(VecFlag(x, mat->factorerrortype));
4150: if (mat->factorerrortype) {
4151: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4152: } else {
4153: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4154: PetscCall((*f)(mat, b, x));
4155: }
4156: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4157: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4158: PetscFunctionReturn(PETSC_SUCCESS);
4159: }
4161: /*@
4162: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4163: factored matrix.
4165: Neighbor-wise Collective
4167: Input Parameters:
4168: + mat - the factored matrix
4169: . b - the right-hand-side vector
4170: - y - the vector to be added to
4172: Output Parameter:
4173: . x - the result vector
4175: Level: developer
4177: Note:
4178: The vectors `b` and `x` cannot be the same. I.e., one cannot
4179: call `MatSolveTransposeAdd`(A,x,y,x).
4181: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4182: @*/
4183: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4184: {
4185: PetscScalar one = 1.0;
4186: Vec tmp;
4187: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4189: PetscFunctionBegin;
4195: PetscCheckSameComm(mat, 1, b, 2);
4196: PetscCheckSameComm(mat, 1, y, 3);
4197: PetscCheckSameComm(mat, 1, x, 4);
4198: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4199: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4200: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4201: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4202: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4203: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4204: MatCheckPreallocated(mat, 1);
4206: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4207: PetscCall(VecFlag(x, mat->factorerrortype));
4208: if (mat->factorerrortype) {
4209: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4210: } else if (f) {
4211: PetscCall((*f)(mat, b, y, x));
4212: } else {
4213: /* do the solve then the add manually */
4214: if (x != y) {
4215: PetscCall(MatSolveTranspose(mat, b, x));
4216: PetscCall(VecAXPY(x, one, y));
4217: } else {
4218: PetscCall(VecDuplicate(x, &tmp));
4219: PetscCall(VecCopy(x, tmp));
4220: PetscCall(MatSolveTranspose(mat, b, x));
4221: PetscCall(VecAXPY(x, one, tmp));
4222: PetscCall(VecDestroy(&tmp));
4223: }
4224: }
4225: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4226: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4227: PetscFunctionReturn(PETSC_SUCCESS);
4228: }
4230: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4231: /*@
4232: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4234: Neighbor-wise Collective
4236: Input Parameters:
4237: + mat - the matrix
4238: . b - the right-hand side
4239: . omega - the relaxation factor
4240: . flag - flag indicating the type of SOR (see below)
4241: . shift - diagonal shift
4242: . its - the number of iterations
4243: - lits - the number of local iterations
4245: Output Parameter:
4246: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4248: SOR Flags:
4249: + `SOR_FORWARD_SWEEP` - forward SOR
4250: . `SOR_BACKWARD_SWEEP` - backward SOR
4251: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4252: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4253: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4254: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4255: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4256: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4257: upper/lower triangular part of matrix to
4258: vector (with omega)
4259: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4261: Level: developer
4263: Notes:
4264: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4265: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4266: on each processor.
4268: Application programmers will not generally use `MatSOR()` directly,
4269: but instead will employ the `KSP`/`PC` interface.
4271: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing, otherwise it does a pointwise smoothing
4273: Most users should employ the `KSP` interface for linear solvers
4274: instead of working directly with matrix algebra routines such as this.
4275: See, e.g., `KSPCreate()`.
4277: Vectors `x` and `b` CANNOT be the same
4279: The flags are implemented as bitwise inclusive or operations.
4280: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4281: to specify a zero initial guess for SSOR.
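For example, a single local symmetric sweep with a zero initial guess might look like this minimal sketch (assumes `A`, `b`, and `x` are an assembled matrix and compatible vectors; the cast simply keeps compilers happy when combining the flags):
.vb
  PetscCall(MatSOR(A, b, 1.0, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_SYMMETRIC_SWEEP), 0.0, 1, 1, x));
.ve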
4283: Developer Note:
4284: We should add block SOR support for `MATAIJ` matrices with block size greater than one and no inodes
4286: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4287: @*/
4288: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4289: {
4290: PetscFunctionBegin;
4295: PetscCheckSameComm(mat, 1, b, 2);
4296: PetscCheckSameComm(mat, 1, x, 8);
4297: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4298: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4299: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4300: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4301: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4302: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4303: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4304: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4306: MatCheckPreallocated(mat, 1);
4307: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4308: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4309: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4310: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4311: PetscFunctionReturn(PETSC_SUCCESS);
4312: }
4314: /*
4315: Default matrix copy routine.
4316: */
4317: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4318: {
4319: PetscInt i, rstart = 0, rend = 0, nz;
4320: const PetscInt *cwork;
4321: const PetscScalar *vwork;
4323: PetscFunctionBegin;
4324: if (B->assembled) PetscCall(MatZeroEntries(B));
4325: if (str == SAME_NONZERO_PATTERN) {
4326: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4327: for (i = rstart; i < rend; i++) {
4328: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4329: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4330: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4331: }
4332: } else {
4333: PetscCall(MatAYPX(B, 0.0, A, str));
4334: }
4335: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4336: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4337: PetscFunctionReturn(PETSC_SUCCESS);
4338: }
4340: /*@
4341: MatCopy - Copies a matrix to another matrix.
4343: Collective
4345: Input Parameters:
4346: + A - the matrix
4347: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4349: Output Parameter:
4350: . B - where the copy is put
4352: Level: intermediate
4354: Notes:
4355: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4357: `MatCopy()` copies the matrix entries of a matrix to another existing
4358: matrix (after first zeroing the second matrix). A related routine is
4359: `MatConvert()`, which first creates a new matrix and then copies the data.
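A typical pattern, sketched below, is to create the destination with `MatDuplicate()` so the nonzero pattern matches, then refresh its values with `MatCopy()` (assumes `A` is an assembled matrix):
.vb
  Mat B;

  PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
  PetscCall(MatCopy(A, B, SAME_NONZERO_PATTERN)); /* safe because B was duplicated from A */
.ve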
4361: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4362: @*/
4363: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4364: {
4365: PetscInt i;
4367: PetscFunctionBegin;
4372: PetscCheckSameComm(A, 1, B, 2);
4373: MatCheckPreallocated(B, 2);
4374: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4375: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4376: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4377: A->cmap->N, B->cmap->N);
4378: MatCheckPreallocated(A, 1);
4379: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4381: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4382: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4383: else PetscCall(MatCopy_Basic(A, B, str));
4385: B->stencil.dim = A->stencil.dim;
4386: B->stencil.noc = A->stencil.noc;
4387: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4388: B->stencil.dims[i] = A->stencil.dims[i];
4389: B->stencil.starts[i] = A->stencil.starts[i];
4390: }
4392: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4393: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4394: PetscFunctionReturn(PETSC_SUCCESS);
4395: }
4397: /*@
4398: MatConvert - Converts a matrix to another matrix, either of the same
4399: or different type.
4401: Collective
4403: Input Parameters:
4404: + mat - the matrix
4405: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4406: same type as the original matrix.
4407: - reuse - denotes if the destination matrix is to be created or reused.
4408: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input mat to be changed to contain the matrix in the new format), otherwise use
4409: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4411: Output Parameter:
4412: . M - pointer to place new matrix
4414: Level: intermediate
4416: Notes:
4417: `MatConvert()` first creates a new matrix and then copies the data from
4418: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4419: entries of one matrix to another already existing matrix context.
4421: Cannot be used to convert a sequential matrix to parallel or a parallel matrix to sequential, since
4422: the MPI communicator of the generated matrix is always the same as the communicator
4423: of the input matrix.
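A minimal sketch of the three reuse modes (assumes `A` is an assembled `MATAIJ` matrix):
.vb
  Mat B;

  PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &B)); /* new dense copy of A       */
  PetscCall(MatConvert(A, MATDENSE, MAT_REUSE_MATRIX, &B));   /* refill the existing B     */
  PetscCall(MatConvert(B, MATAIJ, MAT_INPLACE_MATRIX, &B));   /* B itself changes its type */
.ve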
4425: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4426: @*/
4427: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4428: {
4429: PetscBool sametype, issame, flg;
4430: PetscBool3 issymmetric, ishermitian;
4431: char convname[256], mtype[256];
4432: Mat B;
4434: PetscFunctionBegin;
4437: PetscAssertPointer(M, 4);
4438: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4439: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4440: MatCheckPreallocated(mat, 1);
4442: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4443: if (flg) newtype = mtype;
4445: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4446: PetscCall(PetscStrcmp(newtype, "same", &issame));
4447: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4448: if (reuse == MAT_REUSE_MATRIX) {
4450: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4451: }
4453: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4454: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4455: PetscFunctionReturn(PETSC_SUCCESS);
4456: }
4458: /* Cache Mat options because some converters use MatHeaderReplace */
4459: issymmetric = mat->symmetric;
4460: ishermitian = mat->hermitian;
4462: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4463: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4464: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4465: } else {
4466: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4467: const char *prefix[3] = {"seq", "mpi", ""};
4468: PetscInt i;
4469: /*
4470: Order of precedence:
4471: 0) See if newtype is a superclass of the current matrix.
4472: 1) See if a specialized converter is known to the current matrix.
4473: 2) See if a specialized converter is known to the desired matrix class.
4474: 3) See if a good general converter is registered for the desired class
4475: (as of 6/27/03 only MATMPIADJ falls into this category).
4476: 4) See if a good general converter is known for the current matrix.
4477: 5) Use a really basic converter.
4478: */
4480: /* 0) See if newtype is a superclass of the current matrix.
4481: i.e. mat is mpiaij and newtype is aij */
4482: for (i = 0; i < 2; i++) {
4483: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4484: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4485: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4486: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4487: if (flg) {
4488: if (reuse == MAT_INPLACE_MATRIX) {
4489: PetscCall(PetscInfo(mat, "Early return\n"));
4490: PetscFunctionReturn(PETSC_SUCCESS);
4491: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4492: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4493: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4494: PetscFunctionReturn(PETSC_SUCCESS);
4495: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4496: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4497: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4498: PetscFunctionReturn(PETSC_SUCCESS);
4499: }
4500: }
4501: }
4502: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4503: for (i = 0; i < 3; i++) {
4504: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4505: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4506: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4507: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4508: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4509: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4510: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4511: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4512: if (conv) goto foundconv;
4513: }
4515: /* 2) See if a specialized converter is known to the desired matrix class. */
4516: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4517: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4518: PetscCall(MatSetType(B, newtype));
4519: for (i = 0; i < 3; i++) {
4520: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4521: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4522: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4523: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4524: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4525: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4526: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4527: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4528: if (conv) {
4529: PetscCall(MatDestroy(&B));
4530: goto foundconv;
4531: }
4532: }
4534: /* 3) See if a good general converter is registered for the desired class */
4535: conv = B->ops->convertfrom;
4536: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4537: PetscCall(MatDestroy(&B));
4538: if (conv) goto foundconv;
4540: /* 4) See if a good general converter is known for the current matrix */
4541: if (mat->ops->convert) conv = mat->ops->convert;
4542: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4543: if (conv) goto foundconv;
4545: /* 5) Use a really basic converter. */
4546: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4547: conv = MatConvert_Basic;
4549: foundconv:
4550: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4551: PetscCall((*conv)(mat, newtype, reuse, M));
4552: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4553: /* the block sizes must be same if the mappings are copied over */
4554: (*M)->rmap->bs = mat->rmap->bs;
4555: (*M)->cmap->bs = mat->cmap->bs;
4556: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4557: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4558: (*M)->rmap->mapping = mat->rmap->mapping;
4559: (*M)->cmap->mapping = mat->cmap->mapping;
4560: }
4561: (*M)->stencil.dim = mat->stencil.dim;
4562: (*M)->stencil.noc = mat->stencil.noc;
4563: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4564: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4565: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4566: }
4567: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4568: }
4569: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4571: /* Copy Mat options */
4572: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4573: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4574: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4575: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4576: PetscFunctionReturn(PETSC_SUCCESS);
4577: }
4579: /*@
4580: MatFactorGetSolverType - Returns the name of the package providing the factorization routines
4582: Not Collective
4584: Input Parameter:
4585: . mat - the matrix, must be a factored matrix
4587: Output Parameter:
4588: . type - the string name of the package (do not free this string)
4590: Level: intermediate
4592: Fortran Note:
4593: Pass in an empty string that is long enough and the package name will be copied into it.
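Example usage (a minimal sketch; `F` is assumed to be a factor matrix previously obtained with `MatGetFactor()`):
.vb
  MatSolverType stype;

  PetscCall(MatFactorGetSolverType(F, &stype));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Factorization provided by %s\n", stype));
.ve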
4595: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4596: @*/
4597: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4598: {
4599: PetscErrorCode (*conv)(Mat, MatSolverType *);
4601: PetscFunctionBegin;
4604: PetscAssertPointer(type, 2);
4605: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4606: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4607: if (conv) PetscCall((*conv)(mat, type));
4608: else *type = MATSOLVERPETSC;
4609: PetscFunctionReturn(PETSC_SUCCESS);
4610: }
4612: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4613: struct _MatSolverTypeForSpecifcType {
4614: MatType mtype;
4615: /* no entry for MAT_FACTOR_NONE */
4616: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4617: MatSolverTypeForSpecifcType next;
4618: };
4620: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4621: struct _MatSolverTypeHolder {
4622: char *name;
4623: MatSolverTypeForSpecifcType handlers;
4624: MatSolverTypeHolder next;
4625: };
4627: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4629: /*@C
4630: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4632: Logically Collective, No Fortran Support
4634: Input Parameters:
4635: + package - name of the package, for example petsc or superlu
4636: . mtype - the matrix type that works with this package
4637: . ftype - the type of factorization supported by the package
4638: - createfactor - routine that will create the factored matrix ready to be used
4640: Level: developer
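Example usage, a minimal sketch with hypothetical names (`MatGetFactor_SeqAIJ_MySolver` stands in for a user-provided routine with the `createfactor` calling sequence; registration is typically done once, e.g. from a package initialization routine):
.vb
  extern PetscErrorCode MatGetFactor_SeqAIJ_MySolver(Mat, MatFactorType, Mat *);

  PetscCall(MatSolverTypeRegister("mysolver", MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_SeqAIJ_MySolver));
.ve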
4642: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4643: `MatGetFactor()`
4644: @*/
4645: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4646: {
4647: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4648: PetscBool flg;
4649: MatSolverTypeForSpecifcType inext, iprev = NULL;
4651: PetscFunctionBegin;
4652: PetscCall(MatInitializePackage());
4653: if (!next) {
4654: PetscCall(PetscNew(&MatSolverTypeHolders));
4655: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4656: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4657: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4658: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4659: PetscFunctionReturn(PETSC_SUCCESS);
4660: }
4661: while (next) {
4662: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4663: if (flg) {
4664: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4665: inext = next->handlers;
4666: while (inext) {
4667: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4668: if (flg) {
4669: inext->createfactor[(int)ftype - 1] = createfactor;
4670: PetscFunctionReturn(PETSC_SUCCESS);
4671: }
4672: iprev = inext;
4673: inext = inext->next;
4674: }
4675: PetscCall(PetscNew(&iprev->next));
4676: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4677: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4678: PetscFunctionReturn(PETSC_SUCCESS);
4679: }
4680: prev = next;
4681: next = next->next;
4682: }
4683: PetscCall(PetscNew(&prev->next));
4684: PetscCall(PetscStrallocpy(package, &prev->next->name));
4685: PetscCall(PetscNew(&prev->next->handlers));
4686: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4687: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4688: PetscFunctionReturn(PETSC_SUCCESS);
4689: }
4691: /*@C
4692: MatSolverTypeGet - Gets the function that creates the factor matrix if it exists
4694: Input Parameters:
4695: + type - name of the package, for example petsc or superlu; if this is `NULL`, then the first result that satisfies the other criteria is returned
4696: . ftype - the type of factorization supported by the type
4697: - mtype - the matrix type that works with this type
4699: Output Parameters:
4700: + foundtype - `PETSC_TRUE` if the type was registered
4701: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4702: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4704: Calling sequence of `createfactor`:
4705: + A - the matrix providing the factor matrix
4706: . ftype - the `MatFactorType` of the factor requested
4707: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4709: Level: developer
4711: Note:
4712: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4713: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4714: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
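Example usage (a minimal sketch querying whether PETSc's own LU is registered for `MATSEQAIJ`):
.vb
  PetscBool foundtype, foundmtype;
  PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *);

  PetscCall(MatSolverTypeGet(MATSOLVERPETSC, MATSEQAIJ, MAT_FACTOR_LU, &foundtype, &foundmtype, &createfactor));
  if (createfactor) { /* the factor matrix could be created directly with (*createfactor)() */ }
.ve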
4716: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4717: `MatInitializePackage()`
4718: @*/
4719: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
4720: {
4721: MatSolverTypeHolder next = MatSolverTypeHolders;
4722: PetscBool flg;
4723: MatSolverTypeForSpecifcType inext;
4725: PetscFunctionBegin;
4726: if (foundtype) *foundtype = PETSC_FALSE;
4727: if (foundmtype) *foundmtype = PETSC_FALSE;
4728: if (createfactor) *createfactor = NULL;
4730: if (type) {
4731: while (next) {
4732: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4733: if (flg) {
4734: if (foundtype) *foundtype = PETSC_TRUE;
4735: inext = next->handlers;
4736: while (inext) {
4737: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4738: if (flg) {
4739: if (foundmtype) *foundmtype = PETSC_TRUE;
4740: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4741: PetscFunctionReturn(PETSC_SUCCESS);
4742: }
4743: inext = inext->next;
4744: }
4745: }
4746: next = next->next;
4747: }
4748: } else {
4749: while (next) {
4750: inext = next->handlers;
4751: while (inext) {
4752: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4753: if (flg && inext->createfactor[(int)ftype - 1]) {
4754: if (foundtype) *foundtype = PETSC_TRUE;
4755: if (foundmtype) *foundmtype = PETSC_TRUE;
4756: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4757: PetscFunctionReturn(PETSC_SUCCESS);
4758: }
4759: inext = inext->next;
4760: }
4761: next = next->next;
4762: }
4763: /* try with base classes inext->mtype */
4764: next = MatSolverTypeHolders;
4765: while (next) {
4766: inext = next->handlers;
4767: while (inext) {
4768: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4769: if (flg && inext->createfactor[(int)ftype - 1]) {
4770: if (foundtype) *foundtype = PETSC_TRUE;
4771: if (foundmtype) *foundmtype = PETSC_TRUE;
4772: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4773: PetscFunctionReturn(PETSC_SUCCESS);
4774: }
4775: inext = inext->next;
4776: }
4777: next = next->next;
4778: }
4779: }
4780: PetscFunctionReturn(PETSC_SUCCESS);
4781: }
4783: PetscErrorCode MatSolverTypeDestroy(void)
4784: {
4785: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4786: MatSolverTypeForSpecifcType inext, iprev;
4788: PetscFunctionBegin;
4789: while (next) {
4790: PetscCall(PetscFree(next->name));
4791: inext = next->handlers;
4792: while (inext) {
4793: PetscCall(PetscFree(inext->mtype));
4794: iprev = inext;
4795: inext = inext->next;
4796: PetscCall(PetscFree(iprev));
4797: }
4798: prev = next;
4799: next = next->next;
4800: PetscCall(PetscFree(prev));
4801: }
4802: MatSolverTypeHolders = NULL;
4803: PetscFunctionReturn(PETSC_SUCCESS);
4804: }
4806: /*@
4807: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4809: Logically Collective
4811: Input Parameter:
4812: . mat - the matrix
4814: Output Parameter:
4815: . flg - `PETSC_TRUE` if uses the ordering
4817: Level: developer
4819: Note:
4820: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4821: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4823: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4824: @*/
4825: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4826: {
4827: PetscFunctionBegin;
4828: *flg = mat->canuseordering;
4829: PetscFunctionReturn(PETSC_SUCCESS);
4830: }
4832: /*@
4833: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4835: Logically Collective
4837: Input Parameters:
4838: + mat - the matrix obtained with `MatGetFactor()`
4839: - ftype - the factorization type to be used
4841: Output Parameter:
4842: . otype - the preferred ordering type
4844: Level: developer
4846: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4847: @*/
4848: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4849: {
4850: PetscFunctionBegin;
4851: *otype = mat->preferredordering[ftype];
4852: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4853: PetscFunctionReturn(PETSC_SUCCESS);
4854: }
4856: /*@
4857: MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic,Numeric()
4859: Collective
4861: Input Parameters:
4862: + mat - the matrix
4863: . type - name of solver type, for example, superlu or petsc (to use PETSc's solver if it is available); if this is `NULL`, then the first result that satisfies
4864: the other criteria is returned
4865: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4867: Output Parameter:
4868: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4870: Options Database Keys:
4871: + -pc_factor_mat_solver_type <type> - choose the type at run time. When using `KSP` solvers
4872: - -mat_factor_bind_factorization <host, device> - where to perform the matrix factorization. The default is device, which might consume more device memory;
4873: one can choose host to save device memory. Currently only supported with `MATSEQAIJCUSPARSE` matrices.
4875: Level: intermediate
4877: Notes:
4878: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4879: types registered with `MatSolverTypeRegister()` cannot be fully tested except at runtime.
4881: Users usually access the factorization solvers via `KSP`
4883: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4884: such as pastix, superlu, mumps etc. PETSc must have been ./configure'd to use the external solver, using the option --download-package or --with-package-dir
4886: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4887: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4888: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4890: Some of the packages have options for controlling the factorization; these are in the form -prefix_mat_packagename_packageoption,
4891: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly one can
4892: call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
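Example usage, a minimal sketch (assumes `A` is an assembled sequential `MATSEQAIJ` matrix and `b`, `x` compatible vectors; most users would let `KSP`/`PC` do this instead):
.vb
  Mat           F;
  IS            rowperm, colperm;
  MatFactorInfo info;

  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
  PetscCall(MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm));
  PetscCall(MatFactorInfoInitialize(&info));
  PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));
  PetscCall(MatDestroy(&F));
.ve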
4894: Developer Note:
4895: This should actually be called `MatCreateFactor()` since it creates a new factor object
4897: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4898: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`
4899: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4900: @*/
4901: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4902: {
4903: PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
4904: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4906: PetscFunctionBegin;
4910: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4911: MatCheckPreallocated(mat, 1);
4913: PetscCall(MatIsShell(mat, &shell));
4914: if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
4915: if (hasop) {
4916: PetscUseTypeMethod(mat, getfactor, type, ftype, f);
4917: PetscFunctionReturn(PETSC_SUCCESS);
4918: }
4920: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4921: if (!foundtype) {
4922: if (type) {
4923: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4924: ((PetscObject)mat)->type_name, type);
4925: } else {
4926: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4927: }
4928: }
4929: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4930: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4932: PetscCall((*conv)(mat, ftype, f));
4933: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4934: PetscFunctionReturn(PETSC_SUCCESS);
4935: }
4937: /*@
4938: MatGetFactorAvailable - Returns a flag indicating whether a matrix supports a particular solver type and factor type
4940: Not Collective
4942: Input Parameters:
4943: + mat - the matrix
4944: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4945: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4947: Output Parameter:
4948: . flg - `PETSC_TRUE` if the factorization is available
4950: Level: intermediate
4952: Notes:
4953: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4954: such as pastix, superlu, mumps etc.
4956: PETSc must have been ./configure'd to use the external solver, using the option --download-package
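Example usage (a minimal sketch checking for an external Cholesky factorization before requesting it; `MATSOLVERMUMPS` is only available if PETSc was configured with MUMPS):
.vb
  PetscBool flg;
  Mat       F = NULL;

  PetscCall(MatGetFactorAvailable(A, MATSOLVERMUMPS, MAT_FACTOR_CHOLESKY, &flg));
  if (flg) PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_CHOLESKY, &F));
.ve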
4958: Developer Note:
4959: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4961: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4962: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4963: @*/
4964: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4965: {
4966: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4968: PetscFunctionBegin;
4970: PetscAssertPointer(flg, 4);
4972: *flg = PETSC_FALSE;
4973: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
4975: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4976: MatCheckPreallocated(mat, 1);
4978: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4979: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4980: PetscFunctionReturn(PETSC_SUCCESS);
4981: }
4983: /*@
4984: MatDuplicate - Duplicates a matrix including the non-zero structure.
4986: Collective
4988: Input Parameters:
4989: + mat - the matrix
4990: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4991: See the manual page for `MatDuplicateOption()` for an explanation of these options.
4993: Output Parameter:
4994: . M - pointer to place new matrix
4996: Level: intermediate
4998: Notes:
4999: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
5001: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
5003: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
5005: When the original `mat` is the product of a matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
5006: is duplicated; the internal data structures created for the reuse of previous matrix operations are not duplicated.
5007: Users should not use `MatDuplicate()` to create a new matrix `M` if `M` is intended to be reused as the product of a matrix operation.
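Example usage (a minimal sketch; `A` is an assembled matrix):
.vb
  Mat B;

  PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &B)); /* B has the same nonzero structure and values as A */
  PetscCall(MatScale(B, 2.0));                     /* changing B does not affect A                     */
.ve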
5009: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
5010: @*/
5011: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
5012: {
5013: Mat B;
5014: VecType vtype;
5015: PetscInt i;
5016: PetscObject dm, container_h, container_d;
5017: void (*viewf)(void);
5019: PetscFunctionBegin;
5022: PetscAssertPointer(M, 3);
5023: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
5024: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5025: MatCheckPreallocated(mat, 1);
5027: *M = NULL;
5028: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
5029: PetscUseTypeMethod(mat, duplicate, op, M);
5030: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
5031: B = *M;
5033: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
5034: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
5035: PetscCall(MatGetVecType(mat, &vtype));
5036: PetscCall(MatSetVecType(B, vtype));
5038: B->stencil.dim = mat->stencil.dim;
5039: B->stencil.noc = mat->stencil.noc;
5040: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
5041: B->stencil.dims[i] = mat->stencil.dims[i];
5042: B->stencil.starts[i] = mat->stencil.starts[i];
5043: }
5045: B->nooffproczerorows = mat->nooffproczerorows;
5046: B->nooffprocentries = mat->nooffprocentries;
5048: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
5049: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
5050: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
5051: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
5052: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
5053: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
5054: if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
5055: PetscCall(PetscObjectStateIncrease((PetscObject)B));
5056: PetscFunctionReturn(PETSC_SUCCESS);
5057: }
5059: /*@
5060: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
5062: Logically Collective
5064: Input Parameter:
5065: . mat - the matrix
5067: Output Parameter:
5068: . v - the diagonal of the matrix
5070: Level: intermediate
5072: Note:
5073: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
5074: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
5075: is larger than `ndiag`, the values of the remaining entries are unspecified.
5077: Currently only correct in parallel for square matrices.
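Example usage (a minimal sketch for a square matrix; the vector is created with a compatible layout):
.vb
  Vec d;

  PetscCall(MatCreateVecs(A, NULL, &d));
  PetscCall(MatGetDiagonal(A, d));
.ve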
5079: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
5080: @*/
5081: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
5082: {
5083: PetscFunctionBegin;
5087: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5088: MatCheckPreallocated(mat, 1);
5089: if (PetscDefined(USE_DEBUG)) {
5090: PetscInt nv, row, col, ndiag;
5092: PetscCall(VecGetLocalSize(v, &nv));
5093: PetscCall(MatGetLocalSize(mat, &row, &col));
5094: ndiag = PetscMin(row, col);
5095: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5096: }
5098: PetscUseTypeMethod(mat, getdiagonal, v);
5099: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5100: PetscFunctionReturn(PETSC_SUCCESS);
5101: }
5103: /*@
5104: MatGetRowMin - Gets the minimum value (of the real part) of each
5105: row of the matrix
5107: Logically Collective
5109: Input Parameter:
5110: . mat - the matrix
5112: Output Parameters:
5113: + v - the vector for storing the minimums
5114: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5116: Level: intermediate
5118: Note:
5119: The results of this call are the same as if one converted the matrix to dense format
5120: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5122: This code is only implemented for a couple of matrix formats.
5124: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5125: `MatGetRowMax()`
5126: @*/
5127: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5128: {
5129: PetscFunctionBegin;
5133: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5135: if (!mat->cmap->N) {
5136: PetscCall(VecSet(v, PETSC_MAX_REAL));
5137: if (idx) {
5138: PetscInt i, m = mat->rmap->n;
5139: for (i = 0; i < m; i++) idx[i] = -1;
5140: }
5141: } else {
5142: MatCheckPreallocated(mat, 1);
5143: }
5144: PetscUseTypeMethod(mat, getrowmin, v, idx);
5145: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5146: PetscFunctionReturn(PETSC_SUCCESS);
5147: }
5149: /*@
5150: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5151: row of the matrix
5153: Logically Collective
5155: Input Parameter:
5156: . mat - the matrix
5158: Output Parameters:
5159: + v - the vector for storing the minimums
5160: - idx - the indices of the column found for each row (or `NULL` if not needed)
5162: Level: intermediate
5164: Notes:
5165: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5166: row is 0 (the first column).
5168: This code is only implemented for a couple of matrix formats.
5170: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5171: @*/
5172: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5173: {
5174: PetscFunctionBegin;
5178: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5179: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5181: if (!mat->cmap->N) {
5182: PetscCall(VecSet(v, 0.0));
5183: if (idx) {
5184: PetscInt i, m = mat->rmap->n;
5185: for (i = 0; i < m; i++) idx[i] = -1;
5186: }
5187: } else {
5188: MatCheckPreallocated(mat, 1);
5189: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5190: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5191: }
5192: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5193: PetscFunctionReturn(PETSC_SUCCESS);
5194: }
5196: /*@
5197: MatGetRowMax - Gets the maximum value (of the real part) of each
5198: row of the matrix
5200: Logically Collective
5202: Input Parameter:
5203: . mat - the matrix
5205: Output Parameters:
5206: + v - the vector for storing the maximums
5207: - idx - the indices of the column found for each row (optional, otherwise pass `NULL`)
5209: Level: intermediate
5211: Notes:
5212: The results of this call are the same as if one converted the matrix to dense format
5213: and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
5215: This code is only implemented for a couple of matrix formats.
5217: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5218: @*/
5219: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5220: {
5221: PetscFunctionBegin;
5225: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5227: if (!mat->cmap->N) {
5228: PetscCall(VecSet(v, PETSC_MIN_REAL));
5229: if (idx) {
5230: PetscInt i, m = mat->rmap->n;
5231: for (i = 0; i < m; i++) idx[i] = -1;
5232: }
5233: } else {
5234: MatCheckPreallocated(mat, 1);
5235: PetscUseTypeMethod(mat, getrowmax, v, idx);
5236: }
5237: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5238: PetscFunctionReturn(PETSC_SUCCESS);
5239: }
5241: /*@
5242: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5243: row of the matrix
5245: Logically Collective
5247: Input Parameter:
5248: . mat - the matrix
5250: Output Parameters:
5251: + v - the vector for storing the maximums
5252: - idx - the indices of the column found for each row (or `NULL` if not needed)
5254: Level: intermediate
5256: Notes:
5257: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5258: row is 0 (the first column).
5260: This code is only implemented for a couple of matrix formats.
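Example usage (a minimal sketch; the optional index array is sized by the number of local rows):
.vb
  Vec       v;
  PetscInt *idx, m;

  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(PetscMalloc1(m, &idx));
  PetscCall(MatCreateVecs(A, NULL, &v));
  PetscCall(MatGetRowMaxAbs(A, v, idx));
  PetscCall(PetscFree(idx));
.ve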
5262: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5263: @*/
5264: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5265: {
5266: PetscFunctionBegin;
5270: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5272: if (!mat->cmap->N) {
5273: PetscCall(VecSet(v, 0.0));
5274: if (idx) {
5275: PetscInt i, m = mat->rmap->n;
5276: for (i = 0; i < m; i++) idx[i] = -1;
5277: }
5278: } else {
5279: MatCheckPreallocated(mat, 1);
5280: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5281: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5282: }
5283: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5284: PetscFunctionReturn(PETSC_SUCCESS);
5285: }
5287: /*@
5288: MatGetRowSumAbs - Gets the sum of the absolute values of the entries in each row of the matrix
5290: Logically Collective
5292: Input Parameter:
5293: . mat - the matrix
5295: Output Parameter:
5296: . v - the vector for storing the sum
5298: Level: intermediate
5300: This code is only implemented for a couple of matrix formats.
5302: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5303: @*/
5304: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5305: {
5306: PetscFunctionBegin;
5310: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5312: if (!mat->cmap->N) {
5313: PetscCall(VecSet(v, 0.0));
5314: } else {
5315: MatCheckPreallocated(mat, 1);
5316: PetscUseTypeMethod(mat, getrowsumabs, v);
5317: }
5318: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5319: PetscFunctionReturn(PETSC_SUCCESS);
5320: }
5322: /*@
5323: MatGetRowSum - Gets the sum of each row of the matrix
5325: Logically or Neighborhood Collective
5327: Input Parameter:
5328: . mat - the matrix
5330: Output Parameter:
5331: . v - the vector for storing the sum of rows
5333: Level: intermediate
5335: Note:
5336: This code is slow since it is not currently specialized for different formats
5338: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5339: @*/
5340: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5341: {
5342: Vec ones;
5344: PetscFunctionBegin;
5348: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5349: MatCheckPreallocated(mat, 1);
5350: PetscCall(MatCreateVecs(mat, &ones, NULL));
5351: PetscCall(VecSet(ones, 1.));
5352: PetscCall(MatMult(mat, ones, v));
5353: PetscCall(VecDestroy(&ones));
5354: PetscFunctionReturn(PETSC_SUCCESS);
5355: }
5357: /*@
5358: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5359: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5361: Collective
5363: Input Parameter:
5364: . mat - the matrix to provide the transpose
5366: Output Parameter:
5367: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5369: Level: advanced
5371: Note:
5372: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5373: routine allows bypassing that call.
5375: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5376: @*/
5377: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5378: {
5379: MatParentState *rb = NULL;
5381: PetscFunctionBegin;
5382: PetscCall(PetscNew(&rb));
5383: rb->id = ((PetscObject)mat)->id;
5384: rb->state = 0;
5385: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5386: PetscCall(PetscObjectContainerCompose((PetscObject)B, "MatTransposeParent", rb, PetscCtxDestroyDefault));
5387: PetscFunctionReturn(PETSC_SUCCESS);
5388: }
5390: /*@
5391: MatTranspose - Computes an in-place or out-of-place transpose of a matrix.
5393: Collective
5395: Input Parameters:
5396: + mat - the matrix to transpose
5397: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5399: Output Parameter:
5400: . B - the transpose
5402: Level: intermediate
5404: Notes:
5405: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5407: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX`. If you already have a matrix to contain the
5408: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5410: If the nonzero structure of mat changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5412: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose, but don't need the storage to be changed.
5414: If mat is unchanged from the last call this function returns immediately without recomputing the result
5416: If you only need the symbolic transpose, and not the numerical values, use `MatTransposeSymbolic()`
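Example usage (a minimal sketch of out-of-place transposition with reuse after the numerical values of `A` change but its nonzero structure does not):
.vb
  Mat At;

  PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &At));
  /* ... change the values (but not the structure) of A ... */
  PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &At));
.ve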
5418: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5419: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5420: @*/
5421: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5422: {
5423: PetscContainer rB = NULL;
5424: MatParentState *rb = NULL;
5426: PetscFunctionBegin;
5429: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5430: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5431: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5432: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5433: MatCheckPreallocated(mat, 1);
5434: if (reuse == MAT_REUSE_MATRIX) {
5435: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5436: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5437: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5438: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5439: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5440: }
5442: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5443: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5444: PetscUseTypeMethod(mat, transpose, reuse, B);
5445: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5446: }
5447: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5449: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5450: if (reuse != MAT_INPLACE_MATRIX) {
5451: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5452: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5453: rb->state = ((PetscObject)mat)->state;
5454: rb->nonzerostate = mat->nonzerostate;
5455: }
5456: PetscFunctionReturn(PETSC_SUCCESS);
5457: }
5459: /*@
5460: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5462: Collective
5464: Input Parameter:
5465: . A - the matrix to transpose
5467: Output Parameter:
5468: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5469: numerical portion.
5471: Level: intermediate
5473: Note:
5474: This is not supported for many matrix types, use `MatTranspose()` in those cases
5476: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5477: @*/
5478: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5479: {
5480: PetscFunctionBegin;
5483: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5484: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5485: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5486: PetscUseTypeMethod(A, transposesymbolic, B);
5487: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5489: PetscCall(MatTransposeSetPrecursor(A, *B));
5490: PetscFunctionReturn(PETSC_SUCCESS);
5491: }
5493: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5494: {
5495: PetscContainer rB;
5496: MatParentState *rb;
5498: PetscFunctionBegin;
5501: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5502: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5503: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5504: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5505: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5506: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5507: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5508: PetscFunctionReturn(PETSC_SUCCESS);
5509: }
5511: /*@
5512: MatIsTranspose - Test whether a matrix is another one's transpose,
5513: or its own, in which case it tests symmetry.
5515: Collective
5517: Input Parameters:
5518: + A - the matrix to test
5519: . B - the matrix to test against, this can equal the first parameter
5520: - tol - tolerance, differences between entries smaller than this are counted as zero
5522: Output Parameter:
5523: . flg - the result
5525: Level: intermediate
5527: Notes:
5528: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5529: test involves parallel copies of the block off-diagonal parts of the matrix.
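Example usage (a minimal sketch; passing the same matrix twice tests symmetry):
.vb
  PetscBool flg;

  PetscCall(MatIsTranspose(A, A, 0.0, &flg));
.ve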
5531: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5532: @*/
5533: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5534: {
5535: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5537: PetscFunctionBegin;
5540: PetscAssertPointer(flg, 4);
5541: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5542: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5543: *flg = PETSC_FALSE;
5544: if (f && g) {
5545: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5546: PetscCall((*f)(A, B, tol, flg));
5547: } else {
5548: MatType mattype;
5550: PetscCall(MatGetType(f ? B : A, &mattype));
5551: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5552: }
5553: PetscFunctionReturn(PETSC_SUCCESS);
5554: }
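/*
   Editor's usage sketch (not part of matrix.c): per the manual page above, passing the same
   matrix for both arguments tests symmetry. A is assumed to be an assembled matrix created
   elsewhere; the tolerance value is only illustrative.

     PetscBool flg;
     PetscCall(MatIsTranspose(A, A, 1.e-12, &flg)); // is A its own transpose, i.e. symmetric?
     if (flg) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "A is symmetric to tolerance 1e-12\n"));
*/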
5556: /*@
5557:   MatHermitianTranspose - Computes the in-place or out-of-place Hermitian (conjugate) transpose of a matrix.
5559: Collective
5561: Input Parameters:
5562: + mat - the matrix to transpose and complex conjugate
5563: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5565: Output Parameter:
5566: . B - the Hermitian transpose
5568: Level: intermediate
5570: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5571: @*/
5572: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5573: {
5574: PetscFunctionBegin;
5575: PetscCall(MatTranspose(mat, reuse, B));
5576: #if defined(PETSC_USE_COMPLEX)
5577: PetscCall(MatConjugate(*B));
5578: #endif
5579: PetscFunctionReturn(PETSC_SUCCESS);
5580: }
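/*
   Editor's usage sketch (not part of matrix.c): forming the conjugate transpose A^H out of
   place. In real arithmetic this reduces to MatTranspose(), as the code above shows. A is a
   placeholder for an assembled matrix created elsewhere.

     Mat AH;
     PetscCall(MatHermitianTranspose(A, MAT_INITIAL_MATRIX, &AH)); // AH = conj(A)^T
     // ... use AH ...
     PetscCall(MatDestroy(&AH));
*/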
5582: /*@
5583:   MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose, or its own, in which case it tests whether the matrix is Hermitian.
5585: Collective
5587: Input Parameters:
5588: + A - the matrix to test
5589: . B - the matrix to test against, this can equal the first parameter
5590: - tol - tolerance, differences between entries smaller than this are counted as zero
5592: Output Parameter:
5593: . flg - the result
5595: Level: intermediate
5597: Notes:
5598: Only available for `MATAIJ` matrices.
5600: The sequential algorithm
5601: has a running time of the order of the number of nonzeros; the parallel
5602: test involves parallel copies of the block off-diagonal parts of the matrix.
5604: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5605: @*/
5606: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5607: {
5608: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5610: PetscFunctionBegin;
5613: PetscAssertPointer(flg, 4);
5614: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5615: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5616: if (f && g) {
5617:     PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5618: PetscCall((*f)(A, B, tol, flg));
5619: }
5620: PetscFunctionReturn(PETSC_SUCCESS);
5621: }
5623: /*@
5624: MatPermute - Creates a new matrix with rows and columns permuted from the
5625: original.
5627: Collective
5629: Input Parameters:
5630: + mat - the matrix to permute
5631: . row - row permutation, each processor supplies only the permutation for its rows
5632: - col - column permutation, each processor supplies only the permutation for its columns
5634: Output Parameter:
5635: . B - the permuted matrix
5637: Level: advanced
5639: Note:
5640: The index sets map from row/col of permuted matrix to row/col of original matrix.
5641: The index sets should be on the same communicator as mat and have the same local sizes.
5643: Developer Note:
5644: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5645: exploit the fact that row and col are permutations, consider implementing the
5646: more general `MatCreateSubMatrix()` instead.
5648: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5649: @*/
5650: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5651: {
5652: PetscFunctionBegin;
5657: PetscAssertPointer(B, 4);
5658: PetscCheckSameComm(mat, 1, row, 2);
5659: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5660: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5661: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5662: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5663: MatCheckPreallocated(mat, 1);
5665: if (mat->ops->permute) {
5666: PetscUseTypeMethod(mat, permute, row, col, B);
5667: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5668: } else {
5669: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5670: }
5671: PetscFunctionReturn(PETSC_SUCCESS);
5672: }
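/*
   Editor's usage sketch (not part of matrix.c): permuting a matrix with an ordering computed by
   MatGetOrdering(), which is listed in the .seealso above; the choice of MATORDERINGRCM here is
   just one example ordering. A is a placeholder for an assembled matrix.

     IS  rperm, cperm;
     Mat B;
     PetscCall(MatGetOrdering(A, MATORDERINGRCM, &rperm, &cperm)); // bandwidth-reducing ordering
     PetscCall(MatPermute(A, rperm, cperm, &B));                   // B = permuted A
     PetscCall(ISDestroy(&rperm));
     PetscCall(ISDestroy(&cperm));
     PetscCall(MatDestroy(&B));
*/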
5674: /*@
5675: MatEqual - Compares two matrices.
5677: Collective
5679: Input Parameters:
5680: + A - the first matrix
5681: - B - the second matrix
5683: Output Parameter:
5684: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5686: Level: intermediate
5688: .seealso: [](ch_matrices), `Mat`
5689: @*/
5690: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5691: {
5692: PetscFunctionBegin;
5697: PetscAssertPointer(flg, 3);
5698: PetscCheckSameComm(A, 1, B, 2);
5699: MatCheckPreallocated(A, 1);
5700: MatCheckPreallocated(B, 2);
5701: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5702: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5703: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5704: B->cmap->N);
5705: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5706: PetscUseTypeMethod(A, equal, B, flg);
5707: } else {
5708: PetscCall(MatMultEqual(A, B, 10, flg));
5709: }
5710: PetscFunctionReturn(PETSC_SUCCESS);
5711: }
5713: /*@
5714: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5715: matrices that are stored as vectors. Either of the two scaling
5716: matrices can be `NULL`.
5718: Collective
5720: Input Parameters:
5721: + mat - the matrix to be scaled
5722: . l - the left scaling vector (or `NULL`)
5723: - r - the right scaling vector (or `NULL`)
5725: Level: intermediate
5727: Note:
5728:   `MatDiagonalScale()` computes $A = LAR$, where
5729:   $L$ is a diagonal matrix (stored as the vector `l`) and $R$ is a diagonal matrix (stored as the vector `r`).
5730:   The `l` scales the rows of the matrix and the `r` scales the columns of the matrix.
5732: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5733: @*/
5734: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5735: {
5736: PetscFunctionBegin;
5739: if (l) {
5741: PetscCheckSameComm(mat, 1, l, 2);
5742: }
5743: if (r) {
5745: PetscCheckSameComm(mat, 1, r, 3);
5746: }
5747: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5748: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5749: MatCheckPreallocated(mat, 1);
5750: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5752: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5753: PetscUseTypeMethod(mat, diagonalscale, l, r);
5754: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5755: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5756: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5757: PetscFunctionReturn(PETSC_SUCCESS);
5758: }
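/*
   Editor's usage sketch (not part of matrix.c): row-scale A by the reciprocal of its diagonal,
   i.e. A <- D^{-1} A, by passing the scaling as the left vector and NULL for the right one.
   Assumes A is a square, assembled matrix with nonzero diagonal entries.

     Vec d;
     PetscCall(MatCreateVecs(A, NULL, &d));   // vector compatible with the rows of A
     PetscCall(MatGetDiagonal(A, d));
     PetscCall(VecReciprocal(d));
     PetscCall(MatDiagonalScale(A, d, NULL)); // scales rows only; columns are untouched
     PetscCall(VecDestroy(&d));
*/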
5760: /*@
5761: MatScale - Scales all elements of a matrix by a given number.
5763: Logically Collective
5765: Input Parameters:
5766: + mat - the matrix to be scaled
5767: - a - the scaling value
5769: Level: intermediate
5771: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5772: @*/
5773: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5774: {
5775: PetscFunctionBegin;
5778: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5779: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5781: MatCheckPreallocated(mat, 1);
5783: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5784: if (a != (PetscScalar)1.0) {
5785: PetscUseTypeMethod(mat, scale, a);
5786: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5787: }
5788: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5789: PetscFunctionReturn(PETSC_SUCCESS);
5790: }
5792: /*@
5793: MatNorm - Calculates various norms of a matrix.
5795: Collective
5797: Input Parameters:
5798: + mat - the matrix
5799: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5801: Output Parameter:
5802: . nrm - the resulting norm
5804: Level: intermediate
5806: .seealso: [](ch_matrices), `Mat`
5807: @*/
5808: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5809: {
5810: PetscFunctionBegin;
5813: PetscAssertPointer(nrm, 3);
5815: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5816: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5817: MatCheckPreallocated(mat, 1);
5819: PetscUseTypeMethod(mat, norm, type, nrm);
5820: PetscFunctionReturn(PETSC_SUCCESS);
5821: }
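/*
   Editor's usage sketch (not part of matrix.c): querying two of the supported matrix norms on
   an assembled matrix A.

     PetscReal fnorm, inorm;
     PetscCall(MatNorm(A, NORM_FROBENIUS, &fnorm));
     PetscCall(MatNorm(A, NORM_INFINITY, &inorm)); // maximum absolute row sum
*/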
5823: /*
5824: This variable is used to prevent counting of MatAssemblyBegin() that
5825: are called from within a MatAssemblyEnd().
5826: */
5827: static PetscInt MatAssemblyEnd_InUse = 0;
5828: /*@
5829: MatAssemblyBegin - Begins assembling the matrix. This routine should
5830: be called after completing all calls to `MatSetValues()`.
5832: Collective
5834: Input Parameters:
5835: + mat - the matrix
5836: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5838: Level: beginner
5840: Notes:
5841: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5842: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5844: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5845: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5846: using the matrix.
5848: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5849: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5850: a global collective operation requiring all processes that share the matrix.
5852: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine are compressed
5853: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5854: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
5856: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5857: @*/
5858: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5859: {
5860: PetscFunctionBegin;
5863: MatCheckPreallocated(mat, 1);
5864: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5865: if (mat->assembled) {
5866: mat->was_assembled = PETSC_TRUE;
5867: mat->assembled = PETSC_FALSE;
5868: }
5870: if (!MatAssemblyEnd_InUse) {
5871: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5872: PetscTryTypeMethod(mat, assemblybegin, type);
5873: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5874: } else PetscTryTypeMethod(mat, assemblybegin, type);
5875: PetscFunctionReturn(PETSC_SUCCESS);
5876: }
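/*
   Editor's usage sketch (not part of matrix.c): switching from INSERT_VALUES to ADD_VALUES
   requires a flush assembly in between, as described in the notes above. The indices i, j and
   the value v are placeholders.

     PetscCall(MatSetValues(A, 1, &i, 1, &j, &v, INSERT_VALUES));
     PetscCall(MatAssemblyBegin(A, MAT_FLUSH_ASSEMBLY)); // flush before changing the insert mode
     PetscCall(MatAssemblyEnd(A, MAT_FLUSH_ASSEMBLY));
     PetscCall(MatSetValues(A, 1, &i, 1, &j, &v, ADD_VALUES));
     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); // final assembly before using A
     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/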
5878: /*@
5879: MatAssembled - Indicates if a matrix has been assembled and is ready for
5880: use; for example, in matrix-vector product.
5882: Not Collective
5884: Input Parameter:
5885: . mat - the matrix
5887: Output Parameter:
5888: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5890: Level: advanced
5892: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5893: @*/
5894: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5895: {
5896: PetscFunctionBegin;
5898: PetscAssertPointer(assembled, 2);
5899: *assembled = mat->assembled;
5900: PetscFunctionReturn(PETSC_SUCCESS);
5901: }
5903: /*@
5904: MatAssemblyEnd - Completes assembling the matrix. This routine should
5905: be called after `MatAssemblyBegin()`.
5907: Collective
5909: Input Parameters:
5910: + mat - the matrix
5911: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5913: Options Database Keys:
5914: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5915: . -mat_view ::ascii_info_detail - Prints more detailed info
5916: . -mat_view - Prints matrix in ASCII format
5917: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5918: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5919: . -display <name> - Sets display name (default is host)
5920: . -draw_pause <sec> - Sets number of seconds to pause after display
5921: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5922: . -viewer_socket_machine <machine> - Machine to use for socket
5923: . -viewer_socket_port <port> - Port number to use for socket
5924: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5926: Level: beginner
5928: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5929: @*/
5930: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5931: {
5932: static PetscInt inassm = 0;
5933: PetscBool flg = PETSC_FALSE;
5935: PetscFunctionBegin;
5939: inassm++;
5940: MatAssemblyEnd_InUse++;
5941: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5942: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5943: PetscTryTypeMethod(mat, assemblyend, type);
5944: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5945: } else PetscTryTypeMethod(mat, assemblyend, type);
5947: /* Flush assembly is not a true assembly */
5948: if (type != MAT_FLUSH_ASSEMBLY) {
5949: if (mat->num_ass) {
5950: if (!mat->symmetry_eternal) {
5951: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5952: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5953: }
5954: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5955: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5956: }
5957: mat->num_ass++;
5958: mat->assembled = PETSC_TRUE;
5959: mat->ass_nonzerostate = mat->nonzerostate;
5960: }
5962: mat->insertmode = NOT_SET_VALUES;
5963: MatAssemblyEnd_InUse--;
5964: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5965: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5966: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5968: if (mat->checksymmetryonassembly) {
5969: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5970: if (flg) {
5971: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5972: } else {
5973: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5974: }
5975: }
5976: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5977: }
5978: inassm--;
5979: PetscFunctionReturn(PETSC_SUCCESS);
5980: }
5982: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5983: /*@
5984: MatSetOption - Sets a parameter option for a matrix. Some options
5985: may be specific to certain storage formats. Some options
5986: determine how values will be inserted (or added). Sorted,
5987: row-oriented input will generally assemble the fastest. The default
5988: is row-oriented.
5990: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5992: Input Parameters:
5993: + mat - the matrix
5994: . op - the option, one of those listed below (and possibly others),
5995: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5997: Options Describing Matrix Structure:
5998: + `MAT_SPD` - symmetric positive definite
5999: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
6000: . `MAT_HERMITIAN` - transpose is the complex conjugation
6001: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
6002: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
6003: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
6004: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
6006:   These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that they
6007:   do not need to be computed (usually at a high cost)
6009: Options For Use with `MatSetValues()`:
6010: Insert a logically dense subblock, which can be
6011: . `MAT_ROW_ORIENTED` - row-oriented (default)
6013: These options reflect the data you pass in with `MatSetValues()`; it has
6014: nothing to do with how the data is stored internally in the matrix
6015: data structure.
6017: When (re)assembling a matrix, we can restrict the input for
6018: efficiency/debugging purposes. These options include
6019: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
6020: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
6021: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
6022: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
6023: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
6024: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
6025: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
6026: performance for very large process counts.
6027: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
6028: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
6029: functions, instead sending only neighbor messages.
6031: Level: intermediate
6033: Notes:
6034: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
6036: Some options are relevant only for particular matrix types and
6037: are thus ignored by others. Other options are not supported by
6038: certain matrix types and will generate an error message if set.
6040: If using Fortran to compute a matrix, one may need to
6041: use the column-oriented option (or convert to the row-oriented
6042: format).
6044: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
6045: that would generate a new entry in the nonzero structure is instead
6046: ignored. Thus, if memory has not already been allocated for this particular
6047: data, then the insertion is ignored. For dense matrices, in which
6048: the entire array is allocated, no entries are ever ignored.
6049:   Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one less global reduction
6051:   `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6052:   that would generate a new entry in the nonzero structure instead produces
6053:   an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one less global reduction
6055: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6056: that would generate a new entry that has not been preallocated will
6057: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
6058: only.) This is a useful flag when debugging matrix memory preallocation.
6059:   If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one less global reduction
6061: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
6062: other processors should be dropped, rather than stashed.
6063: This is useful if you know that the "owning" processor is also
6064: always generating the correct matrix entries, so that PETSc need
6065: not transfer duplicate entries generated on another processor.
6067: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6068: searches during matrix assembly. When this flag is set, the hash table
6069: is created during the first matrix assembly. This hash table is
6070: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6071: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
6072: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
6073: supported by `MATMPIBAIJ` format only.
6075: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6076: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6078: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6079: a zero location in the matrix
6081: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6083: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6084: zero row routines and thus improves performance for very large process counts.
6086: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6087: part of the matrix (since they should match the upper triangular part).
6089: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6090: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6091: with finite difference schemes with non-periodic boundary conditions.
6093: Developer Note:
6094: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6095: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6096: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6097: not changed.
6099: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6100: @*/
6101: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
6102: {
6103: PetscFunctionBegin;
6105: if (op > 0) {
6108: }
6110: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6112: switch (op) {
6113: case MAT_FORCE_DIAGONAL_ENTRIES:
6114: mat->force_diagonals = flg;
6115: PetscFunctionReturn(PETSC_SUCCESS);
6116: case MAT_NO_OFF_PROC_ENTRIES:
6117: mat->nooffprocentries = flg;
6118: PetscFunctionReturn(PETSC_SUCCESS);
6119: case MAT_SUBSET_OFF_PROC_ENTRIES:
6120: mat->assembly_subset = flg;
6121: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
6122: #if !defined(PETSC_HAVE_MPIUNI)
6123: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
6124: #endif
6125: mat->stash.first_assembly_done = PETSC_FALSE;
6126: }
6127: PetscFunctionReturn(PETSC_SUCCESS);
6128: case MAT_NO_OFF_PROC_ZERO_ROWS:
6129: mat->nooffproczerorows = flg;
6130: PetscFunctionReturn(PETSC_SUCCESS);
6131: case MAT_SPD:
6132: if (flg) {
6133: mat->spd = PETSC_BOOL3_TRUE;
6134: mat->symmetric = PETSC_BOOL3_TRUE;
6135: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6136: } else {
6137: mat->spd = PETSC_BOOL3_FALSE;
6138: }
6139: break;
6140: case MAT_SYMMETRIC:
6141: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6142: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6143: #if !defined(PETSC_USE_COMPLEX)
6144: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6145: #endif
6146: break;
6147: case MAT_HERMITIAN:
6148: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6149: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6150: #if !defined(PETSC_USE_COMPLEX)
6151: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6152: #endif
6153: break;
6154: case MAT_STRUCTURALLY_SYMMETRIC:
6155: mat->structurally_symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6156: break;
6157: case MAT_SYMMETRY_ETERNAL:
6158: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
6159: mat->symmetry_eternal = flg;
6160: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
6161: break;
6162: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6163: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
6164: mat->structural_symmetry_eternal = flg;
6165: break;
6166: case MAT_SPD_ETERNAL:
6167: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
6168: mat->spd_eternal = flg;
6169: if (flg) {
6170: mat->structural_symmetry_eternal = PETSC_TRUE;
6171: mat->symmetry_eternal = PETSC_TRUE;
6172: }
6173: break;
6174: case MAT_STRUCTURE_ONLY:
6175: mat->structure_only = flg;
6176: break;
6177: case MAT_SORTED_FULL:
6178: mat->sortedfull = flg;
6179: break;
6180: default:
6181: break;
6182: }
6183: PetscTryTypeMethod(mat, setoption, op, flg);
6184: PetscFunctionReturn(PETSC_SUCCESS);
6185: }
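/*
   Editor's usage sketch (not part of matrix.c): declaring known structure and turning on a
   preallocation check. Note that MAT_SYMMETRY_ETERNAL may only be set after MAT_SYMMETRIC, as
   enforced in the switch above. A is a placeholder matrix whose type and sizes are already set.

     PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));
     PetscCall(MatSetOption(A, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
     PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE)); // error on unpreallocated entries
*/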
6187: /*@
6188: MatGetOption - Gets a parameter option that has been set for a matrix.
6190: Logically Collective
6192: Input Parameters:
6193: + mat - the matrix
6194: - op  - the option; this only responds to certain options, check the code for which ones
6196: Output Parameter:
6197: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6199: Level: intermediate
6201: Notes:
6202: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6204:   Certain option values may be unknown; for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6205: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6207: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6208: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6209: @*/
6210: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6211: {
6212: PetscFunctionBegin;
6216: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6217: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6219: switch (op) {
6220: case MAT_NO_OFF_PROC_ENTRIES:
6221: *flg = mat->nooffprocentries;
6222: break;
6223: case MAT_NO_OFF_PROC_ZERO_ROWS:
6224: *flg = mat->nooffproczerorows;
6225: break;
6226: case MAT_SYMMETRIC:
6227: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6228: break;
6229: case MAT_HERMITIAN:
6230: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6231: break;
6232: case MAT_STRUCTURALLY_SYMMETRIC:
6233: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6234: break;
6235: case MAT_SPD:
6236: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6237: break;
6238: case MAT_SYMMETRY_ETERNAL:
6239: *flg = mat->symmetry_eternal;
6240: break;
6241: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6242:     *flg = mat->structural_symmetry_eternal;
6243: break;
6244: default:
6245: break;
6246: }
6247: PetscFunctionReturn(PETSC_SUCCESS);
6248: }
6250: /*@
6251: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6252: this routine retains the old nonzero structure.
6254: Logically Collective
6256: Input Parameter:
6257: . mat - the matrix
6259: Level: intermediate
6261: Note:
6262: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6263: See the Performance chapter of the users manual for information on preallocating matrices.
6265: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6266: @*/
6267: PetscErrorCode MatZeroEntries(Mat mat)
6268: {
6269: PetscFunctionBegin;
6272: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6273: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6274: MatCheckPreallocated(mat, 1);
6276: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6277: PetscUseTypeMethod(mat, zeroentries);
6278: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6279: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6280: PetscFunctionReturn(PETSC_SUCCESS);
6281: }
6283: /*@
6284: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6285: of a set of rows and columns of a matrix.
6287: Collective
6289: Input Parameters:
6290: + mat - the matrix
6291: . numRows - the number of rows/columns to zero
6292: . rows - the global row indices
6293: . diag - value put in the diagonal of the eliminated rows
6294: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6295: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6297: Level: intermediate
6299: Notes:
6300: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6302:   For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6303: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6305: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6306: Krylov method to take advantage of the known solution on the zeroed rows.
6308: For the parallel case, all processes that share the matrix (i.e.,
6309: those in the communicator used for matrix creation) MUST call this
6310: routine, regardless of whether any rows being zeroed are owned by
6311: them.
6313:   Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`; it merely zeros those entries in the matrix, but never
6314: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6315: missing.
6317: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6318: list only rows local to itself).
6320: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
6322: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6323: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6324: @*/
6325: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6326: {
6327: PetscFunctionBegin;
6330: if (numRows) PetscAssertPointer(rows, 3);
6331: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6332: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6333: MatCheckPreallocated(mat, 1);
6335: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6336: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6337: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6338: PetscFunctionReturn(PETSC_SUCCESS);
6339: }
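/*
   Editor's usage sketch (not part of matrix.c): eliminating Dirichlet boundary conditions from
   A x = b, as described in the notes above. The count nbc, arrays bcrows/bcvals, and vectors
   x, b are placeholders; x must already hold the known boundary values in the zeroed rows.

     PetscCall(VecSetValues(x, nbc, bcrows, bcvals, INSERT_VALUES)); // known solution on the boundary
     PetscCall(VecAssemblyBegin(x));
     PetscCall(VecAssemblyEnd(x));
     PetscCall(MatZeroRowsColumns(A, nbc, bcrows, 1.0, x, b));       // b is adjusted accordingly
*/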
6341: /*@
6342: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6343: of a set of rows and columns of a matrix.
6345: Collective
6347: Input Parameters:
6348: + mat - the matrix
6349: . is - the rows to zero
6350: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6351: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6352: - b - optional vector of right-hand side, that will be adjusted by provided solution
6354: Level: intermediate
6356: Note:
6357: See `MatZeroRowsColumns()` for details on how this routine operates.
6359: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6360: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6361: @*/
6362: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6363: {
6364: PetscInt numRows;
6365: const PetscInt *rows;
6367: PetscFunctionBegin;
6372: PetscCall(ISGetLocalSize(is, &numRows));
6373: PetscCall(ISGetIndices(is, &rows));
6374: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6375: PetscCall(ISRestoreIndices(is, &rows));
6376: PetscFunctionReturn(PETSC_SUCCESS);
6377: }
6379: /*@
6380: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6381: of a set of rows of a matrix.
6383: Collective
6385: Input Parameters:
6386: + mat - the matrix
6387: . numRows - the number of rows to zero
6388: . rows - the global row indices
6389: . diag - value put in the diagonal of the zeroed rows
6390: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6391: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6393: Level: intermediate
6395: Notes:
6396: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6398: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6400: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6401: Krylov method to take advantage of the known solution on the zeroed rows.
6403:   May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns
6404:   from the matrix).
6406:   Unlike `MatZeroRowsColumns()`, for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure from the eliminated rows of the matrix
6407: but does not release memory. Because of this removal matrix-vector products with the adjusted matrix will be a bit faster. For the dense and block diagonal
6408: formats this does not alter the nonzero structure.
6410:   If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) has been set, the nonzero structure
6411:   of the matrix is not changed; the values are
6412:   merely zeroed.
6414:   The user can set a value in the diagonal entry (or for the `MATAIJ`
6415:   formats can optionally remove the main diagonal entry from the
6416:   nonzero structure as well, by passing 0.0 for `diag`).
6418: For the parallel case, all processes that share the matrix (i.e.,
6419: those in the communicator used for matrix creation) MUST call this
6420: routine, regardless of whether any rows being zeroed are owned by
6421: them.
6423: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6424: list only rows local to itself).
6426: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6427: owns that are to be zeroed. This saves a global synchronization in the implementation.
6429: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6430: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6431: @*/
6432: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6433: {
6434: PetscFunctionBegin;
6437: if (numRows) PetscAssertPointer(rows, 3);
6438: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6439: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6440: MatCheckPreallocated(mat, 1);
6442: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6443: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6444: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6445: PetscFunctionReturn(PETSC_SUCCESS);
6446: }
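/*
   Editor's usage sketch (not part of matrix.c): zeroing rows while keeping the nonzero pattern,
   so that later assemblies can reuse the same structure, per the notes above. nbc and bcrows
   are placeholders for the row count and global row indices.

     PetscCall(MatSetOption(A, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
     PetscCall(MatZeroRows(A, nbc, bcrows, 1.0, NULL, NULL)); // put 1.0 on the zeroed diagonals
*/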
6448: /*@
6449: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6450: of a set of rows of a matrix.
6452: Collective
6454: Input Parameters:
6455: + mat - the matrix
6456: . is - index set of rows to remove (if `NULL` then no row is removed)
6457: . diag - value put in all diagonals of eliminated rows
6458: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6459: - b - optional vector of right-hand side, that will be adjusted by provided solution
6461: Level: intermediate
6463: Note:
6464: See `MatZeroRows()` for details on how this routine operates.
6466: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6467: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6468: @*/
6469: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6470: {
6471: PetscInt numRows = 0;
6472: const PetscInt *rows = NULL;
6474: PetscFunctionBegin;
6477: if (is) {
6479: PetscCall(ISGetLocalSize(is, &numRows));
6480: PetscCall(ISGetIndices(is, &rows));
6481: }
6482: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6483: if (is) PetscCall(ISRestoreIndices(is, &rows));
6484: PetscFunctionReturn(PETSC_SUCCESS);
6485: }
6487: /*@
6488: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6489: of a set of rows of a matrix. These rows must be local to the process.
6491: Collective
6493: Input Parameters:
6494: + mat - the matrix
6495: . numRows - the number of rows to remove
6496: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6497: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6498: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6499: - b - optional vector of right-hand side, that will be adjusted by provided solution
6501: Level: intermediate
6503: Notes:
6504: See `MatZeroRows()` for details on how this routine operates.
6506: The grid coordinates are across the entire grid, not just the local portion
6508:   For periodic boundary conditions use negative indices for values to the left (below 0), which are
6509:   obtained by wrapping values from the right edge. For values to the right of the last entry, use that index plus one,
6510:   etc., to obtain values by wrapping from the left edge. This does not work for anything but the
6511: `DM_BOUNDARY_PERIODIC` boundary type.
6513:   For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6514:   a single value per point) you can skip filling those indices.
6516: Fortran Note:
6517: `idxm` and `idxn` should be declared as
6518: $ MatStencil idxm(4, m)
6519: and the values inserted using
6520: .vb
6521: idxm(MatStencil_i, 1) = i
6522: idxm(MatStencil_j, 1) = j
6523: idxm(MatStencil_k, 1) = k
6524: idxm(MatStencil_c, 1) = c
6525: etc
6526: .ve
6528: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6529: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6530: @*/
6531: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6532: {
6533: PetscInt dim = mat->stencil.dim;
6534: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6535: PetscInt *dims = mat->stencil.dims + 1;
6536: PetscInt *starts = mat->stencil.starts;
6537: PetscInt *dxm = (PetscInt *)rows;
6538: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6540: PetscFunctionBegin;
6543: if (numRows) PetscAssertPointer(rows, 3);
6545: PetscCall(PetscMalloc1(numRows, &jdxm));
6546: for (i = 0; i < numRows; ++i) {
6547: /* Skip unused dimensions (they are ordered k, j, i, c) */
6548: for (j = 0; j < 3 - sdim; ++j) dxm++;
6549: /* Local index in X dir */
6550: tmp = *dxm++ - starts[0];
6551: /* Loop over remaining dimensions */
6552: for (j = 0; j < dim - 1; ++j) {
6553: /* If nonlocal, set index to be negative */
6554: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6555: /* Update local index */
6556: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6557: }
6558: /* Skip component slot if necessary */
6559: if (mat->stencil.noc) dxm++;
6560: /* Local row number */
6561: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6562: }
6563: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6564: PetscCall(PetscFree(jdxm));
6565: PetscFunctionReturn(PETSC_SUCCESS);
6566: }
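/*
   Editor's usage sketch (not part of matrix.c): zeroing one grid row of a matrix obtained from
   a 2d DMDA via DMCreateMatrix(). The global grid indices i and j are placeholders and must
   refer to a row local to this process, as the manual page above requires.

     MatStencil row = {0};
     row.i = i;
     row.j = j;
     PetscCall(MatZeroRowsStencil(A, 1, &row, 1.0, NULL, NULL));
*/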
6568: /*@
6569: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6570: of a set of rows and columns of a matrix.
6572: Collective
6574: Input Parameters:
6575: + mat - the matrix
6576: . numRows - the number of rows/columns to remove
6577: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6578: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6579: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6580: - b - optional vector of right-hand side, that will be adjusted by provided solution
6582: Level: intermediate
6584: Notes:
6585: See `MatZeroRowsColumns()` for details on how this routine operates.
6587: The grid coordinates are across the entire grid, not just the local portion
6589:   For periodic boundary conditions use negative indices for values to the left (below 0), which are
6590:   obtained by wrapping values from the right edge. For values to the right of the last entry, use that index plus one,
6591:   etc., to obtain values by wrapping from the left edge. This does not work for anything but the
6592: `DM_BOUNDARY_PERIODIC` boundary type.
6594:   For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6595:   a single value per point) you can skip filling those indices.
6597: Fortran Note:
6598: `idxm` and `idxn` should be declared as
6599: $ MatStencil idxm(4, m)
6600: and the values inserted using
6601: .vb
6602: idxm(MatStencil_i, 1) = i
6603: idxm(MatStencil_j, 1) = j
6604: idxm(MatStencil_k, 1) = k
6605: idxm(MatStencil_c, 1) = c
6606: etc
6607: .ve
6609: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6610: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6611: @*/
6612: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6613: {
6614: PetscInt dim = mat->stencil.dim;
6615: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6616: PetscInt *dims = mat->stencil.dims + 1;
6617: PetscInt *starts = mat->stencil.starts;
6618: PetscInt *dxm = (PetscInt *)rows;
6619: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6621: PetscFunctionBegin;
6624: if (numRows) PetscAssertPointer(rows, 3);
6626: PetscCall(PetscMalloc1(numRows, &jdxm));
6627: for (i = 0; i < numRows; ++i) {
6628: /* Skip unused dimensions (they are ordered k, j, i, c) */
6629: for (j = 0; j < 3 - sdim; ++j) dxm++;
6630: /* Local index in X dir */
6631: tmp = *dxm++ - starts[0];
6632: /* Loop over remaining dimensions */
6633: for (j = 0; j < dim - 1; ++j) {
6634: /* If nonlocal, set index to be negative */
6635: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6636: /* Update local index */
6637: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6638: }
6639: /* Skip component slot if necessary */
6640: if (mat->stencil.noc) dxm++;
6641: /* Local row number */
6642: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6643: }
6644: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6645: PetscCall(PetscFree(jdxm));
6646: PetscFunctionReturn(PETSC_SUCCESS);
6647: }
6649: /*@
6650: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6651: of a set of rows of a matrix; using local numbering of rows.
6653: Collective
6655: Input Parameters:
6656: + mat - the matrix
6657: . numRows - the number of rows to remove
6658: . rows - the local row indices
6659: . diag - value put in all diagonals of eliminated rows
6660: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6661: - b - optional vector of right-hand side, that will be adjusted by provided solution
6663: Level: intermediate
6665: Notes:
6666: Before calling `MatZeroRowsLocal()`, the user must first set the
6667:   local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6669: See `MatZeroRows()` for details on how this routine operates.
6671: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6672: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6673: @*/
6674: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6675: {
6676: PetscFunctionBegin;
6679: if (numRows) PetscAssertPointer(rows, 3);
6680: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6681: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6682: MatCheckPreallocated(mat, 1);
6684: if (mat->ops->zerorowslocal) {
6685: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6686: } else {
6687: IS is, newis;
6688: const PetscInt *newRows;
6690: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6691: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6692: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6693: PetscCall(ISGetIndices(newis, &newRows));
6694: PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6695: PetscCall(ISRestoreIndices(newis, &newRows));
6696: PetscCall(ISDestroy(&newis));
6697: PetscCall(ISDestroy(&is));
6698: }
6699: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6700: PetscFunctionReturn(PETSC_SUCCESS);
6701: }
6703: /*@
6704: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6705: of a set of rows of a matrix; using local numbering of rows.
6707: Collective
6709: Input Parameters:
6710: + mat - the matrix
6711: . is - index set of rows to remove
6712: . diag - value put in all diagonals of eliminated rows
6713: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6714: - b - optional vector of right-hand side, that will be adjusted by provided solution
6716: Level: intermediate
6718: Notes:
6719: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6720: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6722: See `MatZeroRows()` for details on how this routine operates.
6724: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6725: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6726: @*/
6727: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6728: {
6729: PetscInt numRows;
6730: const PetscInt *rows;
6732: PetscFunctionBegin;
6736: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6737: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6738: MatCheckPreallocated(mat, 1);
6740: PetscCall(ISGetLocalSize(is, &numRows));
6741: PetscCall(ISGetIndices(is, &rows));
6742: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6743: PetscCall(ISRestoreIndices(is, &rows));
6744: PetscFunctionReturn(PETSC_SUCCESS);
6745: }
6747: /*@
6748: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6749: of a set of rows and columns of a matrix; using local numbering of rows.
6751: Collective
6753: Input Parameters:
6754: + mat - the matrix
6755: . numRows - the number of rows to remove
6756: . rows - the global row indices
6757: . diag - value put in all diagonals of eliminated rows
6758: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6759: - b - optional vector of right-hand side, that will be adjusted by provided solution
6761: Level: intermediate
6763: Notes:
6764: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6765: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6767: See `MatZeroRowsColumns()` for details on how this routine operates.
6769: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6770: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6771: @*/
6772: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6773: {
6774: IS is, newis;
6775: const PetscInt *newRows;
6777: PetscFunctionBegin;
6780: if (numRows) PetscAssertPointer(rows, 3);
6781: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6782: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6783: MatCheckPreallocated(mat, 1);
6785: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6786: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6787: PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6788: PetscCall(ISGetIndices(newis, &newRows));
6789: PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6790: PetscCall(ISRestoreIndices(newis, &newRows));
6791: PetscCall(ISDestroy(&newis));
6792: PetscCall(ISDestroy(&is));
6793: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6794: PetscFunctionReturn(PETSC_SUCCESS);
6795: }
6797: /*@
6798: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6799: of a set of rows and columns of a matrix; using local numbering of rows.
6801: Collective
6803: Input Parameters:
6804: + mat - the matrix
6805: . is - index set of rows to remove
6806: . diag - value put in all diagonals of eliminated rows
6807: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6808: - b - optional vector of right-hand side, that will be adjusted by provided solution
6810: Level: intermediate
6812: Notes:
6813: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6814: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6816: See `MatZeroRowsColumns()` for details on how this routine operates.
6818: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6819: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6820: @*/
6821: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6822: {
6823: PetscInt numRows;
6824: const PetscInt *rows;
6826: PetscFunctionBegin;
6830: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6831: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6832: MatCheckPreallocated(mat, 1);
6834: PetscCall(ISGetLocalSize(is, &numRows));
6835: PetscCall(ISGetIndices(is, &rows));
6836: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6837: PetscCall(ISRestoreIndices(is, &rows));
6838: PetscFunctionReturn(PETSC_SUCCESS);
6839: }
6841: /*@
6842: MatGetSize - Returns the numbers of rows and columns in a matrix.
6844: Not Collective
6846: Input Parameter:
6847: . mat - the matrix
6849: Output Parameters:
6850: + m - the number of global rows
6851: - n - the number of global columns
6853: Level: beginner
6855: Note:
6856: Both output parameters can be `NULL` on input.
6858: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6859: @*/
6860: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6861: {
6862: PetscFunctionBegin;
6864: if (m) *m = mat->rmap->N;
6865: if (n) *n = mat->cmap->N;
6866: PetscFunctionReturn(PETSC_SUCCESS);
6867: }
6869: /*@
6870:   MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6871: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6873: Not Collective
6875: Input Parameter:
6876: . mat - the matrix
6878: Output Parameters:
6879: + m - the number of local rows, use `NULL` to not obtain this value
6880: - n - the number of local columns, use `NULL` to not obtain this value
6882: Level: beginner
6884: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6885: @*/
6886: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6887: {
6888: PetscFunctionBegin;
6890: if (m) PetscAssertPointer(m, 2);
6891: if (n) PetscAssertPointer(n, 3);
6892: if (m) *m = mat->rmap->n;
6893: if (n) *n = mat->cmap->n;
6894: PetscFunctionReturn(PETSC_SUCCESS);
6895: }
6897: /*@
6898:   MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with the locally owned
6899:   rows of a vector that one multiplies this matrix by.
6901: Not Collective, unless matrix has not been allocated, then collective
6903: Input Parameter:
6904: . mat - the matrix
6906: Output Parameters:
6907: + m - the global index of the first local column, use `NULL` to not obtain this value
6908: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6910: Level: developer
6912: Notes:
6913: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6915: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6916: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6918: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6919: the local values in the matrix.
6921: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6922: Layouts](sec_matlayout) for details on matrix layouts.
6924: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6925: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6926: @*/
6927: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6928: {
6929: PetscFunctionBegin;
6932: if (m) PetscAssertPointer(m, 2);
6933: if (n) PetscAssertPointer(n, 3);
6934: MatCheckPreallocated(mat, 1);
6935: if (m) *m = mat->cmap->rstart;
6936: if (n) *n = mat->cmap->rend;
6937: PetscFunctionReturn(PETSC_SUCCESS);
6938: }
6940: /*@
6941:   MatGetOwnershipRange - For matrices that own values by row, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6942: this MPI process.
6944: Not Collective
6946: Input Parameter:
6947: . mat - the matrix
6949: Output Parameters:
6950: + m - the global index of the first local row, use `NULL` to not obtain this value
6951: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6953: Level: beginner
6955: Notes:
6956: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6958: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6959: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6961: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6962: the local values in the matrix.
6964:   The returned `n` is one more than the global index of the last row stored locally.
6966: For all matrices it returns the range of matrix rows associated with rows of a vector that
6967: would contain the result of a matrix vector product with this matrix. See [Matrix
6968: Layouts](sec_matlayout) for details on matrix layouts.
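  Example Usage:
  A minimal sketch, assuming `A` is an assembled matrix, that loops over the locally owned rows is
.vb
  PetscInt rstart, rend;

  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  for (PetscInt i = rstart; i < rend; i++) {
    /* work with locally owned global row i, for example with MatGetRow() */
  }
.ve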
6970: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
6971: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6972: @*/
6973: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6974: {
6975: PetscFunctionBegin;
6978: if (m) PetscAssertPointer(m, 2);
6979: if (n) PetscAssertPointer(n, 3);
6980: MatCheckPreallocated(mat, 1);
6981: if (m) *m = mat->rmap->rstart;
6982: if (n) *n = mat->rmap->rend;
6983: PetscFunctionReturn(PETSC_SUCCESS);
6984: }
6986: /*@C
6987:   MatGetOwnershipRanges - For matrices that own values by row, excluding `MATELEMENTAL` and
6988: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6990: Not Collective, unless matrix has not been allocated
6992: Input Parameter:
6993: . mat - the matrix
6995: Output Parameter:
6996: . ranges - start of each process's portion, plus one more than the total length at the end; the array has length `size` + 1,
6997:           where `size` is the number of MPI processes used by `mat`
6999: Level: beginner
7001: Notes:
7002: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7004: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7005: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
7007: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7008: the local values in the matrix.
7010: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
7011: would contain the result of a matrix vector product with this matrix. See [Matrix
7012: Layouts](sec_matlayout) for details on matrix layouts.
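  Example Usage:
  A minimal sketch, assuming `A` is an assembled matrix, that prints the row range of every process is
.vb
  const PetscInt *ranges;
  PetscMPIInt     size;

  PetscCall(MatGetOwnershipRanges(A, &ranges));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  for (PetscMPIInt r = 0; r < size; r++) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "rank %d owns rows %" PetscInt_FMT " through %" PetscInt_FMT "\n", r, ranges[r], ranges[r + 1] - 1));
.ve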
7014: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
7015: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
7016: `DMDAGetGhostCorners()`, `DM`
7017: @*/
7018: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
7019: {
7020: PetscFunctionBegin;
7023: MatCheckPreallocated(mat, 1);
7024: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
7025: PetscFunctionReturn(PETSC_SUCCESS);
7026: }
7028: /*@C
7029:   MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with the rows of a
7030:   vector one multiplies this matrix by that are owned by each process.
7032: Not Collective, unless matrix has not been allocated
7034: Input Parameter:
7035: . mat - the matrix
7037: Output Parameter:
7038: . ranges - start of each process's portion, plus one more than the total length at the end
7040: Level: beginner
7042: Notes:
7043: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7045: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7046: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
7048: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7049: the local values in the matrix.
7051: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
7052: Layouts](sec_matlayout) for details on matrix layouts.
7054: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
7055: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
7056: `DMDAGetGhostCorners()`, `DM`
7057: @*/
7058: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
7059: {
7060: PetscFunctionBegin;
7063: MatCheckPreallocated(mat, 1);
7064: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
7065: PetscFunctionReturn(PETSC_SUCCESS);
7066: }
7068: /*@
7069:   MatGetOwnershipIS - Gets the row and column ownership of a matrix's values as index sets.
7071: Not Collective
7073: Input Parameter:
7074: . A - matrix
7076: Output Parameters:
7077: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
7078: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
7080: Level: intermediate
7082: Note:
7083:   You should call `ISDestroy()` on the returned index sets
7085: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
7086: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
7087: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
7088: details on matrix layouts.
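  Example Usage:
  A minimal sketch, assuming `A` is an assembled matrix, is
.vb
  IS rows, cols;

  PetscCall(MatGetOwnershipIS(A, &rows, &cols));
  /* use the index sets, for example with ISView() or MatCreateSubMatrices() */
  PetscCall(ISDestroy(&rows));
  PetscCall(ISDestroy(&cols));
.ve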
7090: .seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
7091: @*/
7092: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
7093: {
7094: PetscErrorCode (*f)(Mat, IS *, IS *);
7096: PetscFunctionBegin;
7099: MatCheckPreallocated(A, 1);
7100: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
7101: if (f) {
7102: PetscCall((*f)(A, rows, cols));
7103: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
7104: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
7105: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
7106: }
7107: PetscFunctionReturn(PETSC_SUCCESS);
7108: }
7110: /*@
7111:   MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`.
7112: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
7113: to complete the factorization.
7115: Collective
7117: Input Parameters:
7118: + fact - the factorized matrix obtained with `MatGetFactor()`
7119: . mat - the matrix
7120: . row - row permutation
7121: . col - column permutation
7122: - info - structure containing
7123: .vb
7124: levels - number of levels of fill.
7125: expected fill - as ratio of original fill.
7126: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
7127: missing diagonal entries)
7128: .ve
7130: Level: developer
7132: Notes:
7133: See [Matrix Factorization](sec_matfactor) for additional information.
7135: Most users should employ the `KSP` interface for linear solvers
7136: instead of working directly with matrix algebra routines such as this.
7137: See, e.g., `KSPCreate()`.
7139: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
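  Example Usage:
  A minimal sketch, assuming `A` is an assembled `MATSEQAIJ` matrix, of the symbolic then numeric factorization sequence (most users would let `KSP` and `PC` drive these steps) is
.vb
  Mat           F;
  IS            row, col;
  MatFactorInfo info;

  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ILU, &F));
  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &row, &col));
  PetscCall(MatFactorInfoInitialize(&info));
  info.levels = 0;
  info.fill   = 1.0;
  PetscCall(MatILUFactorSymbolic(F, A, row, col, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  /* F may now be used with MatSolve() */
  PetscCall(ISDestroy(&row));
  PetscCall(ISDestroy(&col));
.ve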
7141: Developer Note:
7142: The Fortran interface is not autogenerated as the
7143: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7145: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
7146: `MatGetOrdering()`, `MatFactorInfo`
7147: @*/
7148: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7149: {
7150: PetscFunctionBegin;
7155: PetscAssertPointer(info, 5);
7156: PetscAssertPointer(fact, 1);
7157: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7158: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7159: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7160: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7161: MatCheckPreallocated(mat, 2);
7163: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7164: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7165: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7166: PetscFunctionReturn(PETSC_SUCCESS);
7167: }
7169: /*@
7170: MatICCFactorSymbolic - Performs symbolic incomplete
7171: Cholesky factorization for a symmetric matrix. Use
7172: `MatCholeskyFactorNumeric()` to complete the factorization.
7174: Collective
7176: Input Parameters:
7177: + fact - the factorized matrix obtained with `MatGetFactor()`
7178: . mat - the matrix to be factored
7179: . perm - row and column permutation
7180: - info - structure containing
7181: .vb
7182: levels - number of levels of fill.
7183: expected fill - as ratio of original fill.
7184: .ve
7186: Level: developer
7188: Notes:
7189: Most users should employ the `KSP` interface for linear solvers
7190: instead of working directly with matrix algebra routines such as this.
7191: See, e.g., `KSPCreate()`.
7193: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7195: Developer Note:
7196: The Fortran interface is not autogenerated as the
7197: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7199: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7200: @*/
7201: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7202: {
7203: PetscFunctionBegin;
7207: PetscAssertPointer(info, 4);
7208: PetscAssertPointer(fact, 1);
7209: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7210: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7211: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7212: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7213: MatCheckPreallocated(mat, 2);
7215: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7216: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7217: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7218: PetscFunctionReturn(PETSC_SUCCESS);
7219: }
7221: /*@C
7222: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7223: points to an array of valid matrices, they may be reused to store the new
7224: submatrices.
7226: Collective
7228: Input Parameters:
7229: + mat - the matrix
7230: . n - the number of submatrices to be extracted (on this process, may be zero)
7231: . irow - index set of rows to extract
7232: . icol - index set of columns to extract
7233: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7235: Output Parameter:
7236: . submat - the array of submatrices
7238: Level: advanced
7240: Notes:
7241: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7242: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7243: to extract a parallel submatrix.
7245: Some matrix types place restrictions on the row and column
7246: indices, such as that they be sorted or that they be equal to each other.
7248: The index sets may not have duplicate entries.
7250: When extracting submatrices from a parallel matrix, each processor can
7251: form a different submatrix by setting the rows and columns of its
7252: individual index sets according to the local submatrix desired.
7254: When finished using the submatrices, the user should destroy
7255: them with `MatDestroySubMatrices()`.
7257: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7258: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7260: This routine creates the matrices in submat; you should NOT create them before
7261: calling it. It also allocates the array of matrix pointers submat.
7263: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7264: request one row/column in a block, they must request all rows/columns that are in
7265: that block. For example, if the block size is 2 you cannot request just row 0 and
7266: column 0.
7268: Fortran Note:
7269: One must pass in as `submat` a `Mat` array of size at least `n`+1.
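  Example Usage:
  A minimal sketch, assuming `A` is an assembled parallel matrix with at least two locally owned rows and at least five columns, in which each process extracts the sequential submatrix of its first two local rows and global columns 0 through 4, is
.vb
  IS       irow, icol;
  Mat     *submats;
  PetscInt rstart;

  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, 2, rstart, 1, &irow));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, 5, 0, 1, &icol));
  PetscCall(MatCreateSubMatrices(A, 1, &irow, &icol, MAT_INITIAL_MATRIX, &submats));
  /* use submats[0], a sequential matrix on each process */
  PetscCall(MatDestroySubMatrices(1, &submats));
  PetscCall(ISDestroy(&irow));
  PetscCall(ISDestroy(&icol));
.ve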
7271: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7272: @*/
7273: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7274: {
7275: PetscInt i;
7276: PetscBool eq;
7278: PetscFunctionBegin;
7281: if (n) {
7282: PetscAssertPointer(irow, 3);
7284: PetscAssertPointer(icol, 4);
7286: }
7287: PetscAssertPointer(submat, 6);
7288: if (n && scall == MAT_REUSE_MATRIX) {
7289: PetscAssertPointer(*submat, 6);
7291: }
7292: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7293: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7294: MatCheckPreallocated(mat, 1);
7295: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7296: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7297: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7298: for (i = 0; i < n; i++) {
7299: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7300: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7301: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7302: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7303: if (mat->boundtocpu && mat->bindingpropagates) {
7304: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7305: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7306: }
7307: #endif
7308: }
7309: PetscFunctionReturn(PETSC_SUCCESS);
7310: }
7312: /*@C
7313:   MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub-communicator of `mat` (by pairs of `IS` that may live on sub-communicators).
7315: Collective
7317: Input Parameters:
7318: + mat - the matrix
7319: . n - the number of submatrices to be extracted
7320: . irow - index set of rows to extract
7321: . icol - index set of columns to extract
7322: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7324: Output Parameter:
7325: . submat - the array of submatrices
7327: Level: advanced
7329: Note:
7330: This is used by `PCGASM`
7332: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7333: @*/
7334: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7335: {
7336: PetscInt i;
7337: PetscBool eq;
7339: PetscFunctionBegin;
7342: if (n) {
7343: PetscAssertPointer(irow, 3);
7345: PetscAssertPointer(icol, 4);
7347: }
7348: PetscAssertPointer(submat, 6);
7349: if (n && scall == MAT_REUSE_MATRIX) {
7350: PetscAssertPointer(*submat, 6);
7352: }
7353: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7354: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7355: MatCheckPreallocated(mat, 1);
7357: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7358: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7359: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7360: for (i = 0; i < n; i++) {
7361: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7362: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7363: }
7364: PetscFunctionReturn(PETSC_SUCCESS);
7365: }
7367: /*@C
7368: MatDestroyMatrices - Destroys an array of matrices.
7370: Collective
7372: Input Parameters:
7373: + n - the number of local matrices
7374: - mat - the matrices (this is a pointer to the array of matrices)
7376: Level: advanced
7378: Notes:
7379: Frees not only the matrices, but also the array that contains the matrices
7381: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7383: Fortran Note:
7384: Does not free the `mat` array.
7386: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7387: @*/
7388: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7389: {
7390: PetscInt i;
7392: PetscFunctionBegin;
7393: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7394: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7395: PetscAssertPointer(mat, 2);
7397: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7399: /* memory is allocated even if n = 0 */
7400: PetscCall(PetscFree(*mat));
7401: PetscFunctionReturn(PETSC_SUCCESS);
7402: }
7404: /*@C
7405: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7407: Collective
7409: Input Parameters:
7410: + n - the number of local matrices
7411: - mat - the matrices (this is a pointer to the array of matrices, just to match the calling
7412: sequence of `MatCreateSubMatrices()`)
7414: Level: advanced
7416: Note:
7417: Frees not only the matrices, but also the array that contains the matrices
7419: Fortran Note:
7420: Does not free the `mat` array.
7422: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7423: @*/
7424: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7425: {
7426: Mat mat0;
7428: PetscFunctionBegin;
7429: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7430: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7431: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7432: PetscAssertPointer(mat, 2);
7434: mat0 = (*mat)[0];
7435: if (mat0 && mat0->ops->destroysubmatrices) {
7436: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7437: } else {
7438: PetscCall(MatDestroyMatrices(n, mat));
7439: }
7440: PetscFunctionReturn(PETSC_SUCCESS);
7441: }
7443: /*@
7444: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7446: Collective
7448: Input Parameter:
7449: . mat - the matrix
7451: Output Parameter:
7452: . matstruct - the sequential matrix with the nonzero structure of `mat`
7454: Level: developer
7456: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7457: @*/
7458: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7459: {
7460: PetscFunctionBegin;
7462: PetscAssertPointer(matstruct, 2);
7465: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7466: MatCheckPreallocated(mat, 1);
7468: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7469: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7470: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7471: PetscFunctionReturn(PETSC_SUCCESS);
7472: }
7474: /*@C
7475: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7477: Collective
7479: Input Parameter:
7480: . mat - the matrix
7482: Level: advanced
7484: Note:
7485:   This is not needed; one can just call `MatDestroy()`
7487: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7488: @*/
7489: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7490: {
7491: PetscFunctionBegin;
7492: PetscAssertPointer(mat, 1);
7493: PetscCall(MatDestroy(mat));
7494: PetscFunctionReturn(PETSC_SUCCESS);
7495: }
7497: /*@
7498: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7499: replaces the index sets by larger ones that represent submatrices with
7500: additional overlap.
7502: Collective
7504: Input Parameters:
7505: + mat - the matrix
7506: . n - the number of index sets
7507: . is - the array of index sets (these index sets will be changed during the call)
7508: - ov - the additional overlap requested
7510: Options Database Key:
7511: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7513: Level: developer
7515: Note:
7516: The computed overlap preserves the matrix block sizes when the blocks are square.
7517:   That is, if a matrix nonzero for a given block would increase the overlap, all columns associated with
7518: that block are included in the overlap regardless of whether each specific column would increase the overlap.
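  Example Usage:
  A minimal sketch, assuming `A` is an assembled parallel matrix and each process starts from the index set of its own rows, that grows each subdomain by one level of overlap is
.vb
  IS       is;
  PetscInt rstart, rend;

  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &is));
  PetscCall(MatIncreaseOverlap(A, 1, &is, 1));
  /* is now also contains the rows coupled to the originally owned rows */
  PetscCall(ISDestroy(&is));
.ve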
7520: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7521: @*/
7522: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7523: {
7524: PetscInt i, bs, cbs;
7526: PetscFunctionBegin;
7530: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7531: if (n) {
7532: PetscAssertPointer(is, 3);
7534: }
7535: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7536: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7537: MatCheckPreallocated(mat, 1);
7539: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7540: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7541: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7542: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7543: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7544: if (bs == cbs) {
7545: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7546: }
7547: PetscFunctionReturn(PETSC_SUCCESS);
7548: }
7550: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7552: /*@
7553: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7554: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7555: additional overlap.
7557: Collective
7559: Input Parameters:
7560: + mat - the matrix
7561: . n - the number of index sets
7562: . is - the array of index sets (these index sets will be changed during the call)
7563: - ov - the additional overlap requested
7565:   Options Database Key:
7566: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7568: Level: developer
7570: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7571: @*/
7572: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7573: {
7574: PetscInt i;
7576: PetscFunctionBegin;
7579: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7580: if (n) {
7581: PetscAssertPointer(is, 3);
7583: }
7584: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7585: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7586: MatCheckPreallocated(mat, 1);
7587: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7588: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7589: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7590: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7591: PetscFunctionReturn(PETSC_SUCCESS);
7592: }
7594: /*@
7595: MatGetBlockSize - Returns the matrix block size.
7597: Not Collective
7599: Input Parameter:
7600: . mat - the matrix
7602: Output Parameter:
7603: . bs - block size
7605: Level: intermediate
7607: Notes:
7608:   Block row formats are `MATBAIJ` and `MATSBAIJ`; these formats ALWAYS have square block storage in the matrix.
7610: If the block size has not been set yet this routine returns 1.
7612: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7613: @*/
7614: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7615: {
7616: PetscFunctionBegin;
7618: PetscAssertPointer(bs, 2);
7619: *bs = PetscAbs(mat->rmap->bs);
7620: PetscFunctionReturn(PETSC_SUCCESS);
7621: }
7623: /*@
7624: MatGetBlockSizes - Returns the matrix block row and column sizes.
7626: Not Collective
7628: Input Parameter:
7629: . mat - the matrix
7631: Output Parameters:
7632: + rbs - row block size
7633: - cbs - column block size
7635: Level: intermediate
7637: Notes:
7638:   Block row formats are `MATBAIJ` and `MATSBAIJ`; these formats ALWAYS have square block storage in the matrix.
7639: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7641: If a block size has not been set yet this routine returns 1.
7643: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7644: @*/
7645: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7646: {
7647: PetscFunctionBegin;
7649: if (rbs) PetscAssertPointer(rbs, 2);
7650: if (cbs) PetscAssertPointer(cbs, 3);
7651: if (rbs) *rbs = PetscAbs(mat->rmap->bs);
7652: if (cbs) *cbs = PetscAbs(mat->cmap->bs);
7653: PetscFunctionReturn(PETSC_SUCCESS);
7654: }
7656: /*@
7657: MatSetBlockSize - Sets the matrix block size.
7659: Logically Collective
7661: Input Parameters:
7662: + mat - the matrix
7663: - bs - block size
7665: Level: intermediate
7667: Notes:
7668:   Block row formats are `MATBAIJ` and `MATSBAIJ`; these formats ALWAYS have square block storage in the matrix.
7669:   This must be called before `MatSetUp()` or MatXXXSetPreallocation() (otherwise the block size defaults to 1), and the block size cannot be changed later.
7671: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7672: is compatible with the matrix local sizes.
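  Example Usage:
  A minimal sketch that creates a 30 by 30 `MATAIJ` matrix and declares a block size of 3 before it is set up is
.vb
  Mat A;

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 30, 30));
  PetscCall(MatSetType(A, MATAIJ));
  PetscCall(MatSetBlockSize(A, 3));
  PetscCall(MatSetUp(A));
.ve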
7674: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7675: @*/
7676: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7677: {
7678: PetscFunctionBegin;
7681: PetscCall(MatSetBlockSizes(mat, bs, bs));
7682: PetscFunctionReturn(PETSC_SUCCESS);
7683: }
7685: typedef struct {
7686: PetscInt n;
7687: IS *is;
7688: Mat *mat;
7689: PetscObjectState nonzerostate;
7690: Mat C;
7691: } EnvelopeData;
7693: static PetscErrorCode EnvelopeDataDestroy(void **ptr)
7694: {
7695: EnvelopeData *edata = (EnvelopeData *)*ptr;
7697: PetscFunctionBegin;
7698: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7699: PetscCall(PetscFree(edata->is));
7700: PetscCall(PetscFree(edata));
7701: PetscFunctionReturn(PETSC_SUCCESS);
7702: }
7704: /*@
7705:   MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal, this computes and stores
7706: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7708: Collective
7710: Input Parameter:
7711: . mat - the matrix
7713: Level: intermediate
7715: Notes:
7716: There can be zeros within the blocks
7718:   The blocks can overlap between processes, including lying across more than two processes
7720: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7721: @*/
7722: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7723: {
7724: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7725: PetscInt *diag, *odiag, sc;
7726: VecScatter scatter;
7727: PetscScalar *seqv;
7728: const PetscScalar *parv;
7729: const PetscInt *ia, *ja;
7730: PetscBool set, flag, done;
7731: Mat AA = mat, A;
7732: MPI_Comm comm;
7733: PetscMPIInt rank, size, tag;
7734: MPI_Status status;
7735: PetscContainer container;
7736: EnvelopeData *edata;
7737: Vec seq, par;
7738: IS isglobal;
7740: PetscFunctionBegin;
7742: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7743: if (!set || !flag) {
7744: /* TODO: only needs nonzero structure of transpose */
7745: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7746: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7747: }
7748: PetscCall(MatAIJGetLocalMat(AA, &A));
7749: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7750: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7752: PetscCall(MatGetLocalSize(mat, &n, NULL));
7753: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7754: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7755: PetscCallMPI(MPI_Comm_size(comm, &size));
7756: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7758: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7760: if (rank > 0) {
7761: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7762: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7763: }
7764: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7765: for (i = 0; i < n; i++) {
7766: env = PetscMax(env, ja[ia[i + 1] - 1]);
7767: II = rstart + i;
7768: if (env == II) {
7769: starts[lblocks] = tbs;
7770: sizes[lblocks++] = 1 + II - tbs;
7771: tbs = 1 + II;
7772: }
7773: }
7774: if (rank < size - 1) {
7775: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7776: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7777: }
7779: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7780: if (!set || !flag) PetscCall(MatDestroy(&AA));
7781: PetscCall(MatDestroy(&A));
7783: PetscCall(PetscNew(&edata));
7784: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7785: edata->n = lblocks;
7786: /* create IS needed for extracting blocks from the original matrix */
7787: PetscCall(PetscMalloc1(lblocks, &edata->is));
7788: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7790: /* Create the resulting inverse matrix structure with preallocation information */
7791: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7792: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7793: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7794: PetscCall(MatSetType(edata->C, MATAIJ));
7796: /* Communicate the start and end of each row, from each block to the correct rank */
7797: /* TODO: Use PetscSF instead of VecScatter */
7798: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7799: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7800: PetscCall(VecGetArrayWrite(seq, &seqv));
7801: for (PetscInt i = 0; i < lblocks; i++) {
7802: for (PetscInt j = 0; j < sizes[i]; j++) {
7803: seqv[cnt] = starts[i];
7804: seqv[cnt + 1] = starts[i] + sizes[i];
7805: cnt += 2;
7806: }
7807: }
7808: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7809: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7810: sc -= cnt;
7811: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7812: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7813: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7814: PetscCall(ISDestroy(&isglobal));
7815: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7816: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7817: PetscCall(VecScatterDestroy(&scatter));
7818: PetscCall(VecDestroy(&seq));
7819: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7820: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7821: PetscCall(VecGetArrayRead(par, &parv));
7822: cnt = 0;
7823: PetscCall(MatGetSize(mat, NULL, &n));
7824: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7825: PetscInt start, end, d = 0, od = 0;
7827: start = (PetscInt)PetscRealPart(parv[cnt]);
7828: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7829: cnt += 2;
7831: if (start < cstart) {
7832: od += cstart - start + n - cend;
7833: d += cend - cstart;
7834: } else if (start < cend) {
7835: od += n - cend;
7836: d += cend - start;
7837: } else od += n - start;
7838: if (end <= cstart) {
7839: od -= cstart - end + n - cend;
7840: d -= cend - cstart;
7841: } else if (end < cend) {
7842: od -= n - cend;
7843: d -= cend - end;
7844: } else od -= n - end;
7846: odiag[i] = od;
7847: diag[i] = d;
7848: }
7849: PetscCall(VecRestoreArrayRead(par, &parv));
7850: PetscCall(VecDestroy(&par));
7851: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7852: PetscCall(PetscFree2(diag, odiag));
7853: PetscCall(PetscFree2(sizes, starts));
7855: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7856: PetscCall(PetscContainerSetPointer(container, edata));
7857: PetscCall(PetscContainerSetCtxDestroy(container, EnvelopeDataDestroy));
7858: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7859: PetscCall(PetscObjectDereference((PetscObject)container));
7860: PetscFunctionReturn(PETSC_SUCCESS);
7861: }
7863: /*@
7864:   MatInvertVariableBlockEnvelope - sets the matrix `C` to be the inverted block diagonal of the matrix `A`
7866: Collective
7868: Input Parameters:
7869: + A - the matrix
7870: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7872: Output Parameter:
7873: . C - matrix with inverted block diagonal of `A`
7875: Level: advanced
7877: Note:
7878: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
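  Example Usage:
  A minimal sketch, assuming `A` is an assembled matrix whose nonzeros are clustered in blocks along the diagonal, is
.vb
  Mat C;

  PetscCall(MatInvertVariableBlockEnvelope(A, MAT_INITIAL_MATRIX, &C));
  /* apply C, for example with MatMult(), then destroy it when it is no longer needed */
  PetscCall(MatDestroy(&C));
.ve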
7880: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7881: @*/
7882: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7883: {
7884: PetscContainer container;
7885: EnvelopeData *edata;
7886: PetscObjectState nonzerostate;
7888: PetscFunctionBegin;
7889: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7890: if (!container) {
7891: PetscCall(MatComputeVariableBlockEnvelope(A));
7892: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7893: }
7894: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7895: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7896: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7897: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7899: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7900: *C = edata->C;
7902: for (PetscInt i = 0; i < edata->n; i++) {
7903: Mat D;
7904: PetscScalar *dvalues;
7906: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7907: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7908: PetscCall(MatSeqDenseInvert(D));
7909: PetscCall(MatDenseGetArray(D, &dvalues));
7910: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7911: PetscCall(MatDestroy(&D));
7912: }
7913: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7914: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7915: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7916: PetscFunctionReturn(PETSC_SUCCESS);
7917: }
7919: /*@
7920: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7922: Not Collective
7924: Input Parameters:
7925: + mat - the matrix
7926: . nblocks - the number of blocks on this process; each block can only exist on a single process
7927: - bsizes - the block sizes
7929: Level: intermediate
7931: Notes:
7932: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7934: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
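  Example Usage:
  A minimal sketch, assuming `A` is a matrix with 10 local rows whose diagonal point-blocks on this process have sizes 2, 3, and 5, is
.vb
  const PetscInt bsizes[] = {2, 3, 5};

  PetscCall(MatSetVariableBlockSizes(A, 3, bsizes));
.ve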
7936: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7937: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7938: @*/
7939: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
7940: {
7941: PetscInt ncnt = 0, nlocal;
7943: PetscFunctionBegin;
7945: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7946: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
7947: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
7948: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7949: PetscCall(PetscFree(mat->bsizes));
7950: mat->nblocks = nblocks;
7951: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7952: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7953: PetscFunctionReturn(PETSC_SUCCESS);
7954: }
7956: /*@C
7957:   MatGetVariableBlockSizes - Gets the diagonal point-blocks of the matrix that need not be of the same size
7959: Not Collective; No Fortran Support
7961: Input Parameter:
7962: . mat - the matrix
7964: Output Parameters:
7965: + nblocks - the number of blocks on this process
7966: - bsizes - the block sizes
7968: Level: intermediate
7970: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7971: @*/
7972: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
7973: {
7974: PetscFunctionBegin;
7976: if (nblocks) *nblocks = mat->nblocks;
7977: if (bsizes) *bsizes = mat->bsizes;
7978: PetscFunctionReturn(PETSC_SUCCESS);
7979: }
7981: /*@
7982: MatSetBlockSizes - Sets the matrix block row and column sizes.
7984: Logically Collective
7986: Input Parameters:
7987: + mat - the matrix
7988: . rbs - row block size
7989: - cbs - column block size
7991: Level: intermediate
7993: Notes:
7994: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7995: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7996:   This must be called before `MatSetUp()` or MatXXXSetPreallocation() (otherwise the block sizes default to 1), and the block sizes cannot be changed later.
7998: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
7999: are compatible with the matrix local sizes.
8001: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
8003: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
8004: @*/
8005: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
8006: {
8007: PetscFunctionBegin;
8011: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
8012: if (mat->rmap->refcnt) {
8013: ISLocalToGlobalMapping l2g = NULL;
8014: PetscLayout nmap = NULL;
8016: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
8017: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
8018: PetscCall(PetscLayoutDestroy(&mat->rmap));
8019: mat->rmap = nmap;
8020: mat->rmap->mapping = l2g;
8021: }
8022: if (mat->cmap->refcnt) {
8023: ISLocalToGlobalMapping l2g = NULL;
8024: PetscLayout nmap = NULL;
8026: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
8027: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
8028: PetscCall(PetscLayoutDestroy(&mat->cmap));
8029: mat->cmap = nmap;
8030: mat->cmap->mapping = l2g;
8031: }
8032: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
8033: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
8034: PetscFunctionReturn(PETSC_SUCCESS);
8035: }
8037: /*@
8038: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
8040: Logically Collective
8042: Input Parameters:
8043: + mat - the matrix
8044: . fromRow - matrix from which to copy row block size
8045: - fromCol - matrix from which to copy column block size (can be same as fromRow)
8047: Level: developer
8049: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
8050: @*/
8051: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
8052: {
8053: PetscFunctionBegin;
8057: if (fromRow->rmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
8058: if (fromCol->cmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
8059: PetscFunctionReturn(PETSC_SUCCESS);
8060: }
8062: /*@
8063: MatResidual - Default routine to calculate the residual r = b - Ax
8065: Collective
8067: Input Parameters:
8068: + mat - the matrix
8069: . b - the right-hand-side
8070: - x - the approximate solution
8072: Output Parameter:
8073: . r - location to store the residual
8075: Level: developer
8077: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8078: @*/
8079: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8080: {
8081: PetscFunctionBegin;
8087: MatCheckPreallocated(mat, 1);
8088: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8089: if (!mat->ops->residual) {
8090: PetscCall(MatMult(mat, x, r));
8091: PetscCall(VecAYPX(r, -1.0, b));
8092: } else {
8093: PetscUseTypeMethod(mat, residual, b, x, r);
8094: }
8095: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8096: PetscFunctionReturn(PETSC_SUCCESS);
8097: }
8099: /*MC
8100: MatGetRowIJF90 - Obtains the compressed row storage i and j indices for the local rows of a sparse matrix
8102: Synopsis:
8103: MatGetRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
8105: Not Collective
8107: Input Parameters:
8108: + A - the matrix
8109: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8110: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8111: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8112: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8113: always used.
8115: Output Parameters:
8116: + n - number of local rows in the (possibly compressed) matrix
8117: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
8118: . ja - the column indices
8119: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8120: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8122: Level: developer
8124: Note:
8125: Use `MatRestoreRowIJF90()` when you no longer need access to the data
8127: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatRestoreRowIJF90()`
8128: M*/
8130: /*MC
8131: MatRestoreRowIJF90 - restores the compressed row storage i and j indices for the local rows of a sparse matrix obtained with `MatGetRowIJF90()`
8133: Synopsis:
8134: MatRestoreRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
8136: Not Collective
8138: Input Parameters:
8139: + A - the matrix
8140: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8141: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8142: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8143: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8144: always used.
8145: . n - number of local rows in the (possibly compressed) matrix
8146: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
8147: . ja - the column indices
8148: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8149: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8151: Level: developer
8153: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatGetRowIJF90()`
8154: M*/
8156: /*@C
8157: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8159: Collective
8161: Input Parameters:
8162: + mat - the matrix
8163: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8164: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8165: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8166: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8167: always used.
8169: Output Parameters:
8170: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8171: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8172: . ja - the column indices, use `NULL` if not needed
8173: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8174: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8176: Level: developer
8178: Notes:
8179: You CANNOT change any of the ia[] or ja[] values.
8181: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
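  Example Usage:
  A minimal sketch, assuming `A` is an assembled `MATSEQAIJ` matrix, that visits every stored entry through the row pointers is
.vb
  PetscInt        n;
  const PetscInt *ia, *ja;
  PetscBool       done;

  PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
  if (done) {
    for (PetscInt i = 0; i < n; i++) {
      for (PetscInt k = ia[i]; k < ia[i + 1]; k++) {
        /* ja[k] is the column index of a stored entry in row i */
      }
    }
  }
  PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
.ve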
8183: Fortran Notes:
8184: Use
8185: .vb
8186: PetscInt, pointer :: ia(:),ja(:)
8187: call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8188: ! Access the ith and jth entries via ia(i) and ja(j)
8189: .ve
8191: `MatGetRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatGetRowIJF90()`
8193: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetRowIJF90()`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8194: @*/
8195: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8196: {
8197: PetscFunctionBegin;
8200: if (n) PetscAssertPointer(n, 5);
8201: if (ia) PetscAssertPointer(ia, 6);
8202: if (ja) PetscAssertPointer(ja, 7);
8203: if (done) PetscAssertPointer(done, 8);
8204: MatCheckPreallocated(mat, 1);
8205: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8206: else {
8207: if (done) *done = PETSC_TRUE;
8208: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8209: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8210: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8211: }
8212: PetscFunctionReturn(PETSC_SUCCESS);
8213: }
8215: /*@C
8216: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8218: Collective
8220: Input Parameters:
8221: + mat - the matrix
8222: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8223: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8224: symmetrized
8225: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8226: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8227: always used.
8228: . n - number of columns in the (possibly compressed) matrix
8229: . ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that column of the matrix
8230: - ja - the row indices
8232: Output Parameter:
8233: . done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8235: Level: developer
8237: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8238: @*/
8239: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8240: {
8241: PetscFunctionBegin;
8244: PetscAssertPointer(n, 5);
8245: if (ia) PetscAssertPointer(ia, 6);
8246: if (ja) PetscAssertPointer(ja, 7);
8247: PetscAssertPointer(done, 8);
8248: MatCheckPreallocated(mat, 1);
8249: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8250: else {
8251: *done = PETSC_TRUE;
8252: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8253: }
8254: PetscFunctionReturn(PETSC_SUCCESS);
8255: }
8257: /*@C
8258: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8260: Collective
8262: Input Parameters:
8263: + mat - the matrix
8264: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8265: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8266: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8267: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8268: always used.
8269: . n - size of (possibly compressed) matrix
8270: . ia - the row pointers
8271: - ja - the column indices
8273: Output Parameter:
8274: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8276: Level: developer
8278: Note:
8279: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8280:   use of the arrays after they have been restored. If you pass `NULL`, it will
8281:   not zero the pointers. Use of `ia` or `ja` after `MatRestoreRowIJ()` is invalid.
8283: Fortran Note:
8284: `MatRestoreRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatRestoreRowIJF90()`
8286: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreRowIJF90()`, `MatRestoreColumnIJ()`
8287: @*/
8288: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8289: {
8290: PetscFunctionBegin;
8293: if (ia) PetscAssertPointer(ia, 6);
8294: if (ja) PetscAssertPointer(ja, 7);
8295: if (done) PetscAssertPointer(done, 8);
8296: MatCheckPreallocated(mat, 1);
8298: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8299: else {
8300: if (done) *done = PETSC_TRUE;
8301: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8302: if (n) *n = 0;
8303: if (ia) *ia = NULL;
8304: if (ja) *ja = NULL;
8305: }
8306: PetscFunctionReturn(PETSC_SUCCESS);
8307: }
8309: /*@C
8310: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8312: Collective
8314: Input Parameters:
8315: + mat - the matrix
8316: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8317: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8318: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8319: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8320: always used.
8322: Output Parameters:
8323: + n - size of (possibly compressed) matrix
8324: . ia - the column pointers
8325: . ja - the row indices
8326: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8328: Level: developer
8330: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8331: @*/
8332: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8333: {
8334: PetscFunctionBegin;
8337: if (ia) PetscAssertPointer(ia, 6);
8338: if (ja) PetscAssertPointer(ja, 7);
8339: PetscAssertPointer(done, 8);
8340: MatCheckPreallocated(mat, 1);
8342: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8343: else {
8344: *done = PETSC_TRUE;
8345: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8346: if (n) *n = 0;
8347: if (ia) *ia = NULL;
8348: if (ja) *ja = NULL;
8349: }
8350: PetscFunctionReturn(PETSC_SUCCESS);
8351: }
8353: /*@
8354: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8355: `MatGetColumnIJ()`.
8357: Collective
8359: Input Parameters:
8360: + mat - the matrix
8361: . ncolors - maximum color value
8362: . n - number of entries in colorarray
8363: - colorarray - array indicating color for each column
8365: Output Parameter:
8366: . iscoloring - coloring generated using colorarray information
8368: Level: developer
8370: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8371: @*/
8372: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8373: {
8374: PetscFunctionBegin;
8377: PetscAssertPointer(colorarray, 4);
8378: PetscAssertPointer(iscoloring, 5);
8379: MatCheckPreallocated(mat, 1);
8381: if (!mat->ops->coloringpatch) {
8382: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8383: } else {
8384: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8385: }
8386: PetscFunctionReturn(PETSC_SUCCESS);
8387: }
8389: /*@
8390: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8392: Logically Collective
8394: Input Parameter:
8395: . mat - the factored matrix to be reset
8397: Level: developer
8399: Notes:
8400: This routine should be used only with factored matrices formed by in-place
8401: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8402: format). This option can save memory, for example, when solving nonlinear
8403: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8404: ILU(0) preconditioner.
8406: One can specify in-place ILU(0) factorization by calling
8407: .vb
8408: PCSetType(pc,PCILU);
8409: PCFactorSetUseInPlace(pc,PETSC_TRUE);
8410: .ve
8411: or by using the options -pc_type ilu -pc_factor_in_place
8413: In-place factorization ILU(0) can also be used as a local
8414: solver for the blocks within the block Jacobi or additive Schwarz
8415: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8416: for details on setting local solver options.
8418: Most users should employ the `KSP` interface for linear solvers
8419: instead of working directly with matrix algebra routines such as this.
8420: See, e.g., `KSPCreate()`.
8422: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8423: @*/
8424: PetscErrorCode MatSetUnfactored(Mat mat)
8425: {
8426: PetscFunctionBegin;
8429: MatCheckPreallocated(mat, 1);
8430: mat->factortype = MAT_FACTOR_NONE;
8431: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8432: PetscUseTypeMethod(mat, setunfactored);
8433: PetscFunctionReturn(PETSC_SUCCESS);
8434: }
8436: /*MC
8437: MatDenseGetArrayF90 - Accesses a matrix array from Fortran
8439: Synopsis:
8440: MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8442: Not Collective
8444: Input Parameter:
8445: . x - matrix
8447: Output Parameters:
8448: + xx_v - the Fortran pointer to the array
8449: - ierr - error code
8451: Example of Usage:
8452: .vb
8453: PetscScalar, pointer :: xx_v(:,:)
8454: ....
8455: call MatDenseGetArrayF90(x,xx_v,ierr)
8456: a = xx_v(3,1)
8457: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8458: .ve
8460: Level: advanced
8462: .seealso: [](ch_matrices), `Mat`, `MatDenseRestoreArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJGetArrayF90()`
8463: M*/
8465: /*MC
8466: MatDenseRestoreArrayF90 - Restores a matrix array that has been
8467: accessed with `MatDenseGetArrayF90()`.
8469: Synopsis:
8470: MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8472: Not Collective
8474: Input Parameters:
8475: + x - matrix
8476: - xx_v - the Fortran90 pointer to the array
8478: Output Parameter:
8479: . ierr - error code
8481: Example of Usage:
8482: .vb
8483: PetscScalar, pointer :: xx_v(:,:)
8484: ....
8485: call MatDenseGetArrayF90(x,xx_v,ierr)
8486: a = xx_v(3,1)
8487: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8488: .ve
8490: Level: advanced
8492: .seealso: [](ch_matrices), `Mat`, `MatDenseGetArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJRestoreArrayF90()`
8493: M*/
8495: /*MC
8496: MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran.
8498: Synopsis:
8499: MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8501: Not Collective
8503: Input Parameter:
8504: . x - matrix
8506: Output Parameters:
8507: + xx_v - the Fortran pointer to the array
8508: - ierr - error code
8510: Example of Usage:
8511: .vb
8512: PetscScalar, pointer :: xx_v(:)
8513: ....
8514: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8515: a = xx_v(3)
8516: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8517: .ve
8519: Level: advanced
8521: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseGetArrayF90()`
8522: M*/
8524: /*MC
8525: MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
8526: accessed with `MatSeqAIJGetArrayF90()`.
8528: Synopsis:
8529: MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8531: Not Collective
8533: Input Parameters:
8534: + x - matrix
8535: - xx_v - the Fortran90 pointer to the array
8537: Output Parameter:
8538: . ierr - error code
8540: Example of Usage:
8541: .vb
8542: PetscScalar, pointer :: xx_v(:)
8543: ....
8544: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8545: a = xx_v(3)
8546: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8547: .ve
8549: Level: advanced
8551: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseRestoreArrayF90()`
8552: M*/
8554: /*@
8555: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8556: as the original matrix.
8558: Collective
8560: Input Parameters:
8561: + mat - the original matrix
8562: . isrow - parallel `IS` containing the rows this processor should obtain
8563: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" in the new matrix.
8564: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8566: Output Parameter:
8567: . newmat - the new submatrix, of the same type as the original matrix
8569: Level: advanced
8571: Notes:
8572: The resulting submatrix can be multiplied by vectors that have the same parallel layout as `iscol`.
8574: Some matrix types place restrictions on the row and column indices, such
8575: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8576: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8578: The index sets may not have duplicate entries.
8580: The first time this is called you should use a `cll` of `MAT_INITIAL_MATRIX`;
8581: `MatCreateSubMatrix()` will then create `newmat` for you. Any additional calls
8582: to this routine with a `mat` of the same nonzero structure and with a `cll` of `MAT_REUSE_MATRIX`
8583: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8584: you are finished using it.
8586: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8587: the input matrix.
8589: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8591: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8592: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8594: Example usage:
8595: Consider the following 8x8 matrix with 34 non-zero values, which is
8596: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8597: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8598: as follows
8599: .vb
8600: 1 2 0 | 0 3 0 | 0 4
8601: Proc0 0 5 6 | 7 0 0 | 8 0
8602: 9 0 10 | 11 0 0 | 12 0
8603: -------------------------------------
8604: 13 0 14 | 15 16 17 | 0 0
8605: Proc1 0 18 0 | 19 20 21 | 0 0
8606: 0 0 0 | 22 23 0 | 24 0
8607: -------------------------------------
8608: Proc2 25 26 27 | 0 0 28 | 29 0
8609: 30 0 0 | 31 32 33 | 0 34
8610: .ve
8612: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8614: .vb
8615: 2 0 | 0 3 0 | 0
8616: Proc0 5 6 | 7 0 0 | 8
8617: -------------------------------
8618: Proc1 18 0 | 19 20 21 | 0
8619: -------------------------------
8620: Proc2 26 27 | 0 0 28 | 29
8621: 0 0 | 31 32 33 | 0
8622: .ve
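A minimal call-sequence sketch (assuming the parallel matrix `A` and the index sets `isrow` and `iscol` have already been created):
.vb
   Mat sub = NULL;

   PetscCall(MatCreateSubMatrix(A, isrow, iscol, MAT_INITIAL_MATRIX, &sub));
   /* ... use sub ... */
   PetscCall(MatCreateSubMatrix(A, isrow, iscol, MAT_REUSE_MATRIX, &sub)); /* after the values of A change */
   PetscCall(MatDestroy(&sub));
.ve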
8624: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8625: @*/
8626: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8627: {
8628: PetscMPIInt size;
8629: Mat *local;
8630: IS iscoltmp;
8631: PetscBool flg;
8633: PetscFunctionBegin;
8637: PetscAssertPointer(newmat, 5);
8640: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8641: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8643: MatCheckPreallocated(mat, 1);
8644: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8646: if (!iscol || isrow == iscol) {
8647: PetscBool stride;
8648: PetscMPIInt grabentirematrix = 0, grab;
8649: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8650: if (stride) {
8651: PetscInt first, step, n, rstart, rend;
8652: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8653: if (step == 1) {
8654: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8655: if (rstart == first) {
8656: PetscCall(ISGetLocalSize(isrow, &n));
8657: if (n == rend - rstart) grabentirematrix = 1;
8658: }
8659: }
8660: }
8661: PetscCallMPI(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8662: if (grab) {
8663: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8664: if (cll == MAT_INITIAL_MATRIX) {
8665: *newmat = mat;
8666: PetscCall(PetscObjectReference((PetscObject)mat));
8667: }
8668: PetscFunctionReturn(PETSC_SUCCESS);
8669: }
8670: }
8672: if (!iscol) {
8673: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8674: } else {
8675: iscoltmp = iscol;
8676: }
8678: /* if original matrix is on just one processor then use submatrix generated */
8679: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8680: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8681: goto setproperties;
8682: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8683: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8684: *newmat = *local;
8685: PetscCall(PetscFree(local));
8686: goto setproperties;
8687: } else if (!mat->ops->createsubmatrix) {
8688: /* Create a new matrix type that implements the operation using the full matrix */
8689: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8690: switch (cll) {
8691: case MAT_INITIAL_MATRIX:
8692: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8693: break;
8694: case MAT_REUSE_MATRIX:
8695: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8696: break;
8697: default:
8698: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8699: }
8700: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8701: goto setproperties;
8702: }
8704: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8705: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8706: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8708: setproperties:
8709: if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
8710: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8711: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8712: }
8713: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8714: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8715: PetscFunctionReturn(PETSC_SUCCESS);
8716: }
8718: /*@
8719: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8721: Not Collective
8723: Input Parameters:
8724: + A - the matrix we wish to propagate options from
8725: - B - the matrix we wish to propagate options to
8727: Level: beginner
8729: Note:
8730: Propagates the options associated with `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8732: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8733: @*/
8734: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8735: {
8736: PetscFunctionBegin;
8739: B->symmetry_eternal = A->symmetry_eternal;
8740: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8741: B->symmetric = A->symmetric;
8742: B->structurally_symmetric = A->structurally_symmetric;
8743: B->spd = A->spd;
8744: B->hermitian = A->hermitian;
8745: PetscFunctionReturn(PETSC_SUCCESS);
8746: }
8748: /*@
8749: MatStashSetInitialSize - sets the sizes of the matrix stash, which is
8750: used during the assembly process to store values that belong to
8751: other processors.
8753: Not Collective
8755: Input Parameters:
8756: + mat - the matrix
8757: . size - the initial size of the stash.
8758: - bsize - the initial size of the block stash (if used).
8760: Options Database Keys:
8761: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8762: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8764: Level: intermediate
8766: Notes:
8767: The block-stash is used for values set with `MatSetValuesBlocked()` while
8768: the stash is used for values set with `MatSetValues()`
8770: Run with the option -info and look for output of the form
8771: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8772: to determine the appropriate value, MM, to use for `size`, and
8773: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8774: to determine the value, BMM, to use for `bsize`
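For example, to preallocate room for 10000 stashed values and 1000 stashed blocks (sizes chosen here only for illustration):
.vb
   PetscCall(MatStashSetInitialSize(A, 10000, 1000));
.ve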
8776: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8777: @*/
8778: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8779: {
8780: PetscFunctionBegin;
8783: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8784: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8785: PetscFunctionReturn(PETSC_SUCCESS);
8786: }
8788: /*@
8789: MatInterpolateAdd - $w = y + A*x$ or $w = y + A^T*x$ depending on the shape of
8790: the matrix
8792: Neighbor-wise Collective
8794: Input Parameters:
8795: + A - the matrix
8796: . x - the vector to be multiplied by the interpolation operator
8797: - y - the vector to be added to the result
8799: Output Parameter:
8800: . w - the resulting vector
8802: Level: intermediate
8804: Notes:
8805: `w` may be the same vector as `y`.
8807: This allows one to use either the restriction or interpolation (its transpose)
8808: matrix to do the interpolation
8810: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8811: @*/
8812: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8813: {
8814: PetscInt M, N, Ny;
8816: PetscFunctionBegin;
8821: PetscCall(MatGetSize(A, &M, &N));
8822: PetscCall(VecGetSize(y, &Ny));
8823: if (M == Ny) {
8824: PetscCall(MatMultAdd(A, x, y, w));
8825: } else {
8826: PetscCall(MatMultTransposeAdd(A, x, y, w));
8827: }
8828: PetscFunctionReturn(PETSC_SUCCESS);
8829: }
8831: /*@
8832: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8833: the matrix
8835: Neighbor-wise Collective
8837: Input Parameters:
8838: + A - the matrix
8839: - x - the vector to be interpolated
8841: Output Parameter:
8842: . y - the resulting vector
8844: Level: intermediate
8846: Note:
8847: This allows one to use either the restriction or interpolation (its transpose)
8848: matrix to do the interpolation
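For example, with an interpolation matrix `P` (fine-by-coarse), a restriction matrix `R` (coarse-by-fine), a coarse vector `xc`, and a fine vector `xf` (names chosen here only for illustration), both calls below interpolate `xc` to the fine grid:
.vb
   PetscCall(MatInterpolate(P, xc, xf));  /* applies P,   since the row size of P matches the size of xf        */
   PetscCall(MatInterpolate(R, xc, xf));  /* applies R^T, since the row size of R does not match the size of xf */
.ve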
8850: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8851: @*/
8852: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8853: {
8854: PetscInt M, N, Ny;
8856: PetscFunctionBegin;
8860: PetscCall(MatGetSize(A, &M, &N));
8861: PetscCall(VecGetSize(y, &Ny));
8862: if (M == Ny) {
8863: PetscCall(MatMult(A, x, y));
8864: } else {
8865: PetscCall(MatMultTranspose(A, x, y));
8866: }
8867: PetscFunctionReturn(PETSC_SUCCESS);
8868: }
8870: /*@
8871: MatRestrict - $y = A*x$ or $A^T*x$
8873: Neighbor-wise Collective
8875: Input Parameters:
8876: + A - the matrix
8877: - x - the vector to be restricted
8879: Output Parameter:
8880: . y - the resulting vector
8882: Level: intermediate
8884: Note:
8885: This allows one to use either the restriction or interpolation (its transpose)
8886: matrix to do the restriction
8888: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8889: @*/
8890: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8891: {
8892: PetscInt M, N, Nx;
8894: PetscFunctionBegin;
8898: PetscCall(MatGetSize(A, &M, &N));
8899: PetscCall(VecGetSize(x, &Nx));
8900: if (M == Nx) {
8901: PetscCall(MatMultTranspose(A, x, y));
8902: } else {
8903: PetscCall(MatMult(A, x, y));
8904: }
8905: PetscFunctionReturn(PETSC_SUCCESS);
8906: }
8908: /*@
8909: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8911: Neighbor-wise Collective
8913: Input Parameters:
8914: + A - the matrix
8915: . x - the input dense matrix to be multiplied
8916: - w - the input dense matrix to be added to the result
8918: Output Parameter:
8919: . y - the output dense matrix
8921: Level: intermediate
8923: Note:
8924: This allows one to use either the restriction or interpolation (its transpose)
8925: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8926: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8928: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8929: @*/
8930: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8931: {
8932: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8933: PetscBool trans = PETSC_TRUE;
8934: MatReuse reuse = MAT_INITIAL_MATRIX;
8936: PetscFunctionBegin;
8942: PetscCall(MatGetSize(A, &M, &N));
8943: PetscCall(MatGetSize(x, &Mx, &Nx));
8944: if (N == Mx) trans = PETSC_FALSE;
8945: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8946: Mo = trans ? N : M;
8947: if (*y) {
8948: PetscCall(MatGetSize(*y, &My, &Ny));
8949: if (Mo == My && Nx == Ny) {
8950: reuse = MAT_REUSE_MATRIX;
8951: } else {
8952: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8953: PetscCall(MatDestroy(y));
8954: }
8955: }
8957: if (w && *y == w) { /* this is to minimize changes in PCMG */
8958: PetscBool flg;
8960: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8961: if (w) {
8962: PetscInt My, Ny, Mw, Nw;
8964: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8965: PetscCall(MatGetSize(*y, &My, &Ny));
8966: PetscCall(MatGetSize(w, &Mw, &Nw));
8967: if (!flg || My != Mw || Ny != Nw) w = NULL;
8968: }
8969: if (!w) {
8970: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8971: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8972: PetscCall(PetscObjectDereference((PetscObject)w));
8973: } else {
8974: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8975: }
8976: }
8977: if (!trans) {
8978: PetscCall(MatMatMult(A, x, reuse, PETSC_DETERMINE, y));
8979: } else {
8980: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DETERMINE, y));
8981: }
8982: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8983: PetscFunctionReturn(PETSC_SUCCESS);
8984: }
8986: /*@
8987: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8989: Neighbor-wise Collective
8991: Input Parameters:
8992: + A - the matrix
8993: - x - the input dense matrix
8995: Output Parameter:
8996: . y - the output dense matrix
8998: Level: intermediate
9000: Note:
9001: This allows one to use either the restriction or interpolation (its transpose)
9002: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
9003: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
9005: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
9006: @*/
9007: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
9008: {
9009: PetscFunctionBegin;
9010: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
9011: PetscFunctionReturn(PETSC_SUCCESS);
9012: }
9014: /*@
9015: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
9017: Neighbor-wise Collective
9019: Input Parameters:
9020: + A - the matrix
9021: - x - the input dense matrix
9023: Output Parameter:
9024: . y - the output dense matrix
9026: Level: intermediate
9028: Note:
9029: This allows one to use either the restriction or interpolation (its transpose)
9030: matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
9031: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
9033: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
9034: @*/
9035: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
9036: {
9037: PetscFunctionBegin;
9038: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
9039: PetscFunctionReturn(PETSC_SUCCESS);
9040: }
9042: /*@
9043: MatGetNullSpace - retrieves the null space of a matrix.
9045: Logically Collective
9047: Input Parameter:
9048: . mat - the matrix
9049: Output Parameter:
. nullsp - the null space object, `NULL` if not set
9051: Level: developer
9053: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
9054: @*/
9055: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
9056: {
9057: PetscFunctionBegin;
9059: PetscAssertPointer(nullsp, 2);
9060: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
9061: PetscFunctionReturn(PETSC_SUCCESS);
9062: }
9064: /*@C
9065: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
9067: Logically Collective
9069: Input Parameters:
9070: + n - the number of matrices
9071: - mat - the array of matrices
9073: Output Parameter:
9074: . nullsp - an array of length 3 * `n` containing the null spaces, near null spaces, and transpose null spaces, with `NULL` for each matrix that does not have the corresponding space attached
9076: Level: developer
9078: Note:
9079: Call `MatRestoreNullSpaces()` to provide these to another array of matrices
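A minimal sketch (assuming `mats` and `newmats` are arrays of `n` matrices) of moving the attached spaces from one set of matrices to another:
.vb
   MatNullSpace *nsp;

   PetscCall(MatGetNullSpaces(n, mats, &nsp));
   PetscCall(MatRestoreNullSpaces(n, newmats, &nsp));
.ve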
9081: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9082: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
9083: @*/
9084: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9085: {
9086: PetscFunctionBegin;
9087: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9088: PetscAssertPointer(mat, 2);
9089: PetscAssertPointer(nullsp, 3);
9091: PetscCall(PetscCalloc1(3 * n, nullsp));
9092: for (PetscInt i = 0; i < n; i++) {
9094: (*nullsp)[i] = mat[i]->nullsp;
9095: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
9096: (*nullsp)[n + i] = mat[i]->nearnullsp;
9097: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
9098: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
9099: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
9100: }
9101: PetscFunctionReturn(PETSC_SUCCESS);
9102: }
9104: /*@C
9105: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
9107: Logically Collective
9109: Input Parameters:
9110: + n - the number of matrices
9111: . mat - the array of matrices
9112: - nullsp - an array of null spaces
9114: Level: developer
9116: Note:
9117: Call `MatGetNullSpaces()` to create `nullsp`
9119: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9120: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
9121: @*/
9122: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9123: {
9124: PetscFunctionBegin;
9125: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9126: PetscAssertPointer(mat, 2);
9127: PetscAssertPointer(nullsp, 3);
9128: PetscAssertPointer(*nullsp, 3);
9130: for (PetscInt i = 0; i < n; i++) {
9132: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
9133: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
9134: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
9135: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
9136: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
9137: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
9138: }
9139: PetscCall(PetscFree(*nullsp));
9140: PetscFunctionReturn(PETSC_SUCCESS);
9141: }
9143: /*@
9144: MatSetNullSpace - attaches a null space to a matrix.
9146: Logically Collective
9148: Input Parameters:
9149: + mat - the matrix
9150: - nullsp - the null space object
9152: Level: advanced
9154: Notes:
9155: This null space is used by the `KSP` linear solvers to solve singular systems.
9157: Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with a `nullsp` of `NULL`
9159: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
9160: to zero but the linear system will still be solved in a least squares sense.
9162: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that for a matrix $A$
9163: mapping $R^n$ to $R^m$ ($m$ rows, $n$ columns), $R^n$ is the direct sum of the null space of $A$, $n(A)$, and the range of $A^T$, $R(A^T)$.
9164: Similarly, $R^m$ is the direct sum of $n(A^T)$ and $R(A)$. Hence the linear system $A x = b$ has a solution only if $b \in R(A)$ (equivalently, $b$ is orthogonal to
9165: $n(A^T)$), and if $x$ is a solution then $x + \alpha n(A)$ is a solution for any $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution,
9166: the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$, where $\hat{b}$ is $b$ orthogonalized against $n(A^T)$.
9167: This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
9169: If the matrix is known to be symmetric, because it is a `MATSBAIJ` matrix or one has called
9170: `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`), this
9171: routine also automatically calls `MatSetTransposeNullSpace()`.
9173: The user should call `MatNullSpaceDestroy()`.
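Example Usage:
A minimal sketch that attaches the constant-vector null space to a matrix `A` (a common situation for pure Neumann problems):
.vb
   MatNullSpace nullsp;

   PetscCall(MatNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_TRUE, 0, NULL, &nullsp));
   PetscCall(MatSetNullSpace(A, nullsp));
   PetscCall(MatNullSpaceDestroy(&nullsp));
.ve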
9175: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9176: `KSPSetPCSide()`
9177: @*/
9178: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
9179: {
9180: PetscFunctionBegin;
9183: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9184: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
9185: mat->nullsp = nullsp;
9186: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
9187: PetscFunctionReturn(PETSC_SUCCESS);
9188: }
9190: /*@
9191: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
9193: Logically Collective
9195: Input Parameter:
9196: . mat - the matrix
9197: Output Parameter:
. nullsp - the null space object, `NULL` if not set
9199: Level: developer
9201: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9202: @*/
9203: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
9204: {
9205: PetscFunctionBegin;
9208: PetscAssertPointer(nullsp, 2);
9209: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
9210: PetscFunctionReturn(PETSC_SUCCESS);
9211: }
9213: /*@
9214: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
9216: Logically Collective
9218: Input Parameters:
9219: + mat - the matrix
9220: - nullsp - the null space object
9222: Level: advanced
9224: Notes:
9225: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9227: See `MatSetNullSpace()`
9229: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9230: @*/
9231: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
9232: {
9233: PetscFunctionBegin;
9236: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9237: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
9238: mat->transnullsp = nullsp;
9239: PetscFunctionReturn(PETSC_SUCCESS);
9240: }
9242: /*@
9243: MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions.
9244: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9246: Logically Collective
9248: Input Parameters:
9249: + mat - the matrix
9250: - nullsp - the null space object
9252: Level: advanced
9254: Notes:
9255: Overwrites any previous near null space that may have been attached
9257: You can remove the null space by calling this routine with an `nullsp` of `NULL`
9259: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9260: @*/
9261: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
9262: {
9263: PetscFunctionBegin;
9267: MatCheckPreallocated(mat, 1);
9268: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9269: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
9270: mat->nearnullsp = nullsp;
9271: PetscFunctionReturn(PETSC_SUCCESS);
9272: }
9274: /*@
9275: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9277: Not Collective
9279: Input Parameter:
9280: . mat - the matrix
9282: Output Parameter:
9283: . nullsp - the null space object, `NULL` if not set
9285: Level: advanced
9287: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9288: @*/
9289: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
9290: {
9291: PetscFunctionBegin;
9294: PetscAssertPointer(nullsp, 2);
9295: MatCheckPreallocated(mat, 1);
9296: *nullsp = mat->nearnullsp;
9297: PetscFunctionReturn(PETSC_SUCCESS);
9298: }
9300: /*@
9301: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9303: Collective
9305: Input Parameters:
9306: + mat - the matrix
9307: . row - row/column permutation
9308: - info - information on desired factorization process
9310: Level: developer
9312: Notes:
9313: Probably really in-place only when level of fill is zero, otherwise allocates
9314: new space to store the factored matrix and deletes the previous memory.
9316: Most users should employ the `KSP` interface for linear solvers
9317: instead of working directly with matrix algebra routines such as this.
9318: See, e.g., `KSPCreate()`.
9320: Developer Note:
9321: The Fortran interface is not autogenerated as the
9322: interface definition cannot be generated correctly [due to `MatFactorInfo`]
9324: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9325: @*/
9326: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9327: {
9328: PetscFunctionBegin;
9332: PetscAssertPointer(info, 3);
9333: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9334: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9335: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9336: MatCheckPreallocated(mat, 1);
9337: PetscUseTypeMethod(mat, iccfactor, row, info);
9338: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9339: PetscFunctionReturn(PETSC_SUCCESS);
9340: }
9342: /*@
9343: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9344: ghosted ones.
9346: Not Collective
9348: Input Parameters:
9349: + mat - the matrix
9350: - diag - the diagonal values, including ghost ones
9352: Level: developer
9354: Notes:
9355: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9357: This allows one to avoid the communication that would otherwise be needed to perform the scaling with `MatDiagonalScale()`
9359: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9360: @*/
9361: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9362: {
9363: PetscMPIInt size;
9365: PetscFunctionBegin;
9370: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9371: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9372: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9373: if (size == 1) {
9374: PetscInt n, m;
9375: PetscCall(VecGetSize(diag, &n));
9376: PetscCall(MatGetSize(mat, NULL, &m));
9377: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9378: PetscCall(MatDiagonalScale(mat, NULL, diag));
9379: } else {
9380: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9381: }
9382: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9383: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9384: PetscFunctionReturn(PETSC_SUCCESS);
9385: }
9387: /*@
9388: MatGetInertia - Gets the inertia from a factored matrix
9390: Collective
9392: Input Parameter:
9393: . mat - the matrix
9395: Output Parameters:
9396: + nneg - number of negative eigenvalues
9397: . nzero - number of zero eigenvalues
9398: - npos - number of positive eigenvalues
9400: Level: advanced
9402: Note:
9403: Matrix must have been factored by `MatCholeskyFactor()`
9405: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9406: @*/
9407: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9408: {
9409: PetscFunctionBegin;
9412: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9413: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9414: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9415: PetscFunctionReturn(PETSC_SUCCESS);
9416: }
9418: /*@C
9419: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9421: Neighbor-wise Collective
9423: Input Parameters:
9424: + mat - the factored matrix obtained with `MatGetFactor()`
9425: - b - the right-hand-side vectors
9427: Output Parameter:
9428: . x - the result vectors
9430: Level: developer
9432: Note:
9433: The vectors `b` and `x` cannot be the same. I.e., one cannot
9434: call `MatSolves`(A,x,x).
9436: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9437: @*/
9438: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9439: {
9440: PetscFunctionBegin;
9443: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9444: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9445: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9447: MatCheckPreallocated(mat, 1);
9448: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9449: PetscUseTypeMethod(mat, solves, b, x);
9450: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9451: PetscFunctionReturn(PETSC_SUCCESS);
9452: }
9454: /*@
9455: MatIsSymmetric - Test whether a matrix is symmetric
9457: Collective
9459: Input Parameters:
9460: + A - the matrix to test
9461: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9463: Output Parameter:
9464: . flg - the result
9466: Level: intermediate
9468: Notes:
9469: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9471: If the matrix does not yet know if it is symmetric or not this can be an expensive operation, also available `MatIsSymmetricKnown()`
9473: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9474: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
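For example (the tolerance below is chosen only for illustration):
.vb
   PetscBool symm;

   PetscCall(MatIsSymmetric(A, 1.e-10, &symm));
   if (symm) {
     /* safe to treat A as symmetric, e.g. use a Cholesky factorization */
   }
.ve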
9476: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9477: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9478: @*/
9479: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9480: {
9481: PetscFunctionBegin;
9483: PetscAssertPointer(flg, 3);
9484: if (A->symmetric != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->symmetric);
9485: else {
9486: if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9487: else PetscCall(MatIsTranspose(A, A, tol, flg));
9488: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9489: }
9490: PetscFunctionReturn(PETSC_SUCCESS);
9491: }
9493: /*@
9494: MatIsHermitian - Test whether a matrix is Hermitian
9496: Collective
9498: Input Parameters:
9499: + A - the matrix to test
9500: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9502: Output Parameter:
9503: . flg - the result
9505: Level: intermediate
9507: Notes:
9508: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9510: If the matrix does not yet know if it is Hermitian or not this can be an expensive operation, also available `MatIsHermitianKnown()`
9512: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9513: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9515: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9516: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9517: @*/
9518: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9519: {
9520: PetscFunctionBegin;
9522: PetscAssertPointer(flg, 3);
9523: if (A->hermitian != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->hermitian);
9524: else {
9525: if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9526: else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9527: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9528: }
9529: PetscFunctionReturn(PETSC_SUCCESS);
9530: }
9532: /*@
9533: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9535: Not Collective
9537: Input Parameter:
9538: . A - the matrix to check
9540: Output Parameters:
9541: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9542: - flg - the result (only valid if set is `PETSC_TRUE`)
9544: Level: advanced
9546: Notes:
9547: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9548: if you want it explicitly checked
9550: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9551: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9553: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9554: @*/
9555: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9556: {
9557: PetscFunctionBegin;
9559: PetscAssertPointer(set, 2);
9560: PetscAssertPointer(flg, 3);
9561: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9562: *set = PETSC_TRUE;
9563: *flg = PetscBool3ToBool(A->symmetric);
9564: } else {
9565: *set = PETSC_FALSE;
9566: }
9567: PetscFunctionReturn(PETSC_SUCCESS);
9568: }
9570: /*@
9571: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9573: Not Collective
9575: Input Parameter:
9576: . A - the matrix to check
9578: Output Parameters:
9579: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9580: - flg - the result (only valid if set is `PETSC_TRUE`)
9582: Level: advanced
9584: Notes:
9585: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9587: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9588: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9590: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9591: @*/
9592: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9593: {
9594: PetscFunctionBegin;
9596: PetscAssertPointer(set, 2);
9597: PetscAssertPointer(flg, 3);
9598: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9599: *set = PETSC_TRUE;
9600: *flg = PetscBool3ToBool(A->spd);
9601: } else {
9602: *set = PETSC_FALSE;
9603: }
9604: PetscFunctionReturn(PETSC_SUCCESS);
9605: }
9607: /*@
9608: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9610: Not Collective
9612: Input Parameter:
9613: . A - the matrix to check
9615: Output Parameters:
9616: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9617: - flg - the result (only valid if set is `PETSC_TRUE`)
9619: Level: advanced
9621: Notes:
9622: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9623: if you want it explicitly checked
9625: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9626: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9628: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9629: @*/
9630: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9631: {
9632: PetscFunctionBegin;
9634: PetscAssertPointer(set, 2);
9635: PetscAssertPointer(flg, 3);
9636: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9637: *set = PETSC_TRUE;
9638: *flg = PetscBool3ToBool(A->hermitian);
9639: } else {
9640: *set = PETSC_FALSE;
9641: }
9642: PetscFunctionReturn(PETSC_SUCCESS);
9643: }
9645: /*@
9646: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9648: Collective
9650: Input Parameter:
9651: . A - the matrix to test
9653: Output Parameter:
9654: . flg - the result
9656: Level: intermediate
9658: Notes:
9659: If the matrix does not yet know whether it is structurally symmetric this can be an expensive operation; see also `MatIsStructurallySymmetricKnown()`
9661: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9662: symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9664: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9665: @*/
9666: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9667: {
9668: PetscFunctionBegin;
9670: PetscAssertPointer(flg, 2);
9671: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9672: *flg = PetscBool3ToBool(A->structurally_symmetric);
9673: } else {
9674: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9675: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9676: }
9677: PetscFunctionReturn(PETSC_SUCCESS);
9678: }
9680: /*@
9681: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9683: Not Collective
9685: Input Parameter:
9686: . A - the matrix to check
9688: Output Parameters:
9689: + set - `PETSC_TRUE` if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9690: - flg - the result (only valid if `set` is `PETSC_TRUE`)
9692: Level: advanced
9694: Notes:
9695: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9696: symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9698: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9700: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9701: @*/
9702: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9703: {
9704: PetscFunctionBegin;
9706: PetscAssertPointer(set, 2);
9707: PetscAssertPointer(flg, 3);
9708: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9709: *set = PETSC_TRUE;
9710: *flg = PetscBool3ToBool(A->structurally_symmetric);
9711: } else {
9712: *set = PETSC_FALSE;
9713: }
9714: PetscFunctionReturn(PETSC_SUCCESS);
9715: }
9717: /*@
9718: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9719: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9721: Not Collective
9723: Input Parameter:
9724: . mat - the matrix
9726: Output Parameters:
9727: + nstash - the size of the stash
9728: . reallocs - the number of additional mallocs incurred.
9729: . bnstash - the size of the block stash
9730: - breallocs - the number of additional mallocs incurred in the block stash
9732: Level: advanced
9734: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9735: @*/
9736: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9737: {
9738: PetscFunctionBegin;
9739: PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9740: PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9741: PetscFunctionReturn(PETSC_SUCCESS);
9742: }
9744: /*@
9745: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9746: parallel layout, `PetscLayout` for rows and columns
9748: Collective
9750: Input Parameter:
9751: . mat - the matrix
9753: Output Parameters:
9754: + right - (optional) vector that the matrix can be multiplied against
9755: - left - (optional) vector that the matrix vector product can be stored in
9757: Level: advanced
9759: Notes:
9760: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9762: These are new vectors which are not owned by the `mat`; they should be destroyed with `VecDestroy()` when no longer needed
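A minimal sketch creating compatible work vectors for a matrix-vector product with a matrix `A`:
.vb
   Vec x, y;

   PetscCall(MatCreateVecs(A, &x, &y));  /* x is compatible with the columns of A, y with the rows */
   PetscCall(MatMult(A, x, y));
   PetscCall(VecDestroy(&x));
   PetscCall(VecDestroy(&y));
.ve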
9764: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9765: @*/
9766: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9767: {
9768: PetscFunctionBegin;
9771: if (mat->ops->getvecs) {
9772: PetscUseTypeMethod(mat, getvecs, right, left);
9773: } else {
9774: if (right) {
9775: PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9776: PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9777: PetscCall(VecSetType(*right, mat->defaultvectype));
9778: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9779: if (mat->boundtocpu && mat->bindingpropagates) {
9780: PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9781: PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9782: }
9783: #endif
9784: }
9785: if (left) {
9786: PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9787: PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9788: PetscCall(VecSetType(*left, mat->defaultvectype));
9789: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9790: if (mat->boundtocpu && mat->bindingpropagates) {
9791: PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9792: PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9793: }
9794: #endif
9795: }
9796: }
9797: PetscFunctionReturn(PETSC_SUCCESS);
9798: }
9800: /*@
9801: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9802: with default values.
9804: Not Collective
9806: Input Parameter:
9807: . info - the `MatFactorInfo` data structure
9809: Level: developer
9811: Notes:
9812: The solvers are generally used through the `KSP` and `PC` objects, for example
9813: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9815: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
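For example (the field set below is chosen only for illustration):
.vb
   MatFactorInfo info;

   PetscCall(MatFactorInfoInitialize(&info));
   info.fill = 2.0;  /* expected fill as a ratio of the original number of nonzeros */
.ve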
9817: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9818: @*/
9819: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9820: {
9821: PetscFunctionBegin;
9822: PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9823: PetscFunctionReturn(PETSC_SUCCESS);
9824: }
9826: /*@
9827: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9829: Collective
9831: Input Parameters:
9832: + mat - the factored matrix
9833: - is - the index set defining the Schur indices (0-based)
9835: Level: advanced
9837: Notes:
9838: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9840: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9842: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
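A sketch of a typical call sequence (assuming PETSc was configured with MUMPS and `is` holds the Schur indices of an assembled matrix `A`):
.vb
   Mat           F, S;
   MatFactorInfo info;

   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F));
   PetscCall(MatFactorSetSchurIS(F, is));
   PetscCall(MatLUFactorSymbolic(F, A, NULL, NULL, &info));
   PetscCall(MatLUFactorNumeric(F, A, &info));
   PetscCall(MatFactorCreateSchurComplement(F, &S, NULL));
.ve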
9844: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9845: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9846: @*/
9847: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9848: {
9849: PetscErrorCode (*f)(Mat, IS);
9851: PetscFunctionBegin;
9856: PetscCheckSameComm(mat, 1, is, 2);
9857: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9858: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9859: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9860: PetscCall(MatDestroy(&mat->schur));
9861: PetscCall((*f)(mat, is));
9862: PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9863: PetscFunctionReturn(PETSC_SUCCESS);
9864: }
9866: /*@
9867: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9869: Logically Collective
9871: Input Parameters:
9872: + F - the factored matrix obtained by calling `MatGetFactor()`
9873: . S - location where to return the Schur complement, can be `NULL`
9874: - status - the status of the Schur complement matrix, can be `NULL`
9876: Level: advanced
9878: Notes:
9879: You must call `MatFactorSetSchurIS()` before calling this routine.
9881: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9883: The routine provides a copy of the Schur matrix stored within the solver data structures.
9884: The caller must destroy the object when it is no longer needed.
9885: If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.
9887: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9889: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9891: Developer Note:
9892: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9893: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9895: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9896: @*/
9897: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9898: {
9899: PetscFunctionBegin;
9901: if (S) PetscAssertPointer(S, 2);
9902: if (status) PetscAssertPointer(status, 3);
9903: if (S) {
9904: PetscErrorCode (*f)(Mat, Mat *);
9906: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9907: if (f) {
9908: PetscCall((*f)(F, S));
9909: } else {
9910: PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9911: }
9912: }
9913: if (status) *status = F->schur_status;
9914: PetscFunctionReturn(PETSC_SUCCESS);
9915: }
9917: /*@
9918: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9920: Logically Collective
9922: Input Parameters:
9923: + F - the factored matrix obtained by calling `MatGetFactor()`
9924: . S - location where to return the Schur complement, can be `NULL`
9925: - status - the status of the Schur complement matrix, can be `NULL`
9927: Level: advanced
9929: Notes:
9930: You must call `MatFactorSetSchurIS()` before calling this routine.
9932: Schur complement mode is currently implemented for sequential matrices when using the solver type `MATSOLVERMUMPS`
9934: The routine returns the Schur complement stored within the data structures of the solver.
9936: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9938: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9940: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9942: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
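Example Usage (an illustrative sketch; `F` is assumed to be a factored matrix for which `MatFactorSetSchurIS()` was called before the factorization):
.vb
  Mat                  S;
  MatFactorSchurStatus status;

  PetscCall(MatFactorGetSchurComplement(F, &S, &status)); // F: placeholder factored matrix
  // ... read from S; do not destroy it ...
  PetscCall(MatFactorRestoreSchurComplement(F, &S, status));
.ve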
9944: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9945: @*/
9946: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9947: {
9948: PetscFunctionBegin;
9950: if (S) {
9951: PetscAssertPointer(S, 2);
9952: *S = F->schur;
9953: }
9954: if (status) {
9955: PetscAssertPointer(status, 3);
9956: *status = F->schur_status;
9957: }
9958: PetscFunctionReturn(PETSC_SUCCESS);
9959: }
9961: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9962: {
9963: Mat S = F->schur;
9965: PetscFunctionBegin;
9966: switch (F->schur_status) {
9967: case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9968: case MAT_FACTOR_SCHUR_INVERTED:
9969: if (S) {
9970: S->ops->solve = NULL;
9971: S->ops->matsolve = NULL;
9972: S->ops->solvetranspose = NULL;
9973: S->ops->matsolvetranspose = NULL;
9974: S->ops->solveadd = NULL;
9975: S->ops->solvetransposeadd = NULL;
9976: S->factortype = MAT_FACTOR_NONE;
9977: PetscCall(PetscFree(S->solvertype));
9978: }
9979: case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9980: break;
9981: default:
9982: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9983: }
9984: PetscFunctionReturn(PETSC_SUCCESS);
9985: }
9987: /*@
9988: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9990: Logically Collective
9992: Input Parameters:
9993: + F - the factored matrix obtained by calling `MatGetFactor()`
9994: . S - location where the Schur complement is stored
9995: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9997: Level: advanced
9999: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
10000: @*/
10001: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
10002: {
10003: PetscFunctionBegin;
10005: if (S) {
10007: *S = NULL;
10008: }
10009: F->schur_status = status;
10010: PetscCall(MatFactorUpdateSchurStatus_Private(F));
10011: PetscFunctionReturn(PETSC_SUCCESS);
10012: }
10014: /*@
10015: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
10017: Logically Collective
10019: Input Parameters:
10020: + F - the factored matrix obtained by calling `MatGetFactor()`
10021: . rhs - location where the right-hand side of the Schur complement system is stored
10022: - sol - location where the solution of the Schur complement system has to be returned
10024: Level: advanced
10026: Notes:
10027: The sizes of the vectors should match the size of the Schur complement
10029: Must be called after `MatFactorSetSchurIS()`
10031: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
10032: @*/
10033: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
10034: {
10035: PetscFunctionBegin;
10042: PetscCheckSameComm(F, 1, rhs, 2);
10043: PetscCheckSameComm(F, 1, sol, 3);
10044: PetscCall(MatFactorFactorizeSchurComplement(F));
10045: switch (F->schur_status) {
10046: case MAT_FACTOR_SCHUR_FACTORED:
10047: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
10048: break;
10049: case MAT_FACTOR_SCHUR_INVERTED:
10050: PetscCall(MatMultTranspose(F->schur, rhs, sol));
10051: break;
10052: default:
10053: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
10054: }
10055: PetscFunctionReturn(PETSC_SUCCESS);
10056: }
10058: /*@
10059: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
10061: Logically Collective
10063: Input Parameters:
10064: + F - the factored matrix obtained by calling `MatGetFactor()`
10065: . rhs - location where the right-hand side of the Schur complement system is stored
10066: - sol - location where the solution of the Schur complement system has to be returned
10068: Level: advanced
10070: Notes:
10071: The sizes of the vectors should match the size of the Schur complement
10073: Must be called after `MatFactorSetSchurIS()`
10075: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
10076: @*/
10077: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
10078: {
10079: PetscFunctionBegin;
10086: PetscCheckSameComm(F, 1, rhs, 2);
10087: PetscCheckSameComm(F, 1, sol, 3);
10088: PetscCall(MatFactorFactorizeSchurComplement(F));
10089: switch (F->schur_status) {
10090: case MAT_FACTOR_SCHUR_FACTORED:
10091: PetscCall(MatSolve(F->schur, rhs, sol));
10092: break;
10093: case MAT_FACTOR_SCHUR_INVERTED:
10094: PetscCall(MatMult(F->schur, rhs, sol));
10095: break;
10096: default:
10097: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
10098: }
10099: PetscFunctionReturn(PETSC_SUCCESS);
10100: }
10102: PETSC_EXTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
10103: #if PetscDefined(HAVE_CUDA)
10104: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
10105: #endif
10107: /* Schur status updated in the interface */
10108: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
10109: {
10110: Mat S = F->schur;
10112: PetscFunctionBegin;
10113: if (S) {
10114: PetscMPIInt size;
10115: PetscBool isdense, isdensecuda;
10117: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
10118: PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
10119: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
10120: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
10121: PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
10122: PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
10123: if (isdense) {
10124: PetscCall(MatSeqDenseInvertFactors_Private(S));
10125: } else if (isdensecuda) {
10126: #if defined(PETSC_HAVE_CUDA)
10127: PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
10128: #endif
10129: }
10130: // TODO: should a HIP dense case be handled here as well?
10131: PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
10132: }
10133: PetscFunctionReturn(PETSC_SUCCESS);
10134: }
10136: /*@
10137: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
10139: Logically Collective
10141: Input Parameter:
10142: . F - the factored matrix obtained by calling `MatGetFactor()`
10144: Level: advanced
10146: Notes:
10147: Must be called after `MatFactorSetSchurIS()`.
10149: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
10151: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
10152: @*/
10153: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
10154: {
10155: PetscFunctionBegin;
10158: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
10159: PetscCall(MatFactorFactorizeSchurComplement(F));
10160: PetscCall(MatFactorInvertSchurComplement_Private(F));
10161: F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
10162: PetscFunctionReturn(PETSC_SUCCESS);
10163: }
10165: /*@
10166: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
10168: Logically Collective
10170: Input Parameter:
10171: . F - the factored matrix obtained by calling `MatGetFactor()`
10173: Level: advanced
10175: Note:
10176: Must be called after `MatFactorSetSchurIS()`
10178: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
10179: @*/
10180: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
10181: {
10182: MatFactorInfo info;
10184: PetscFunctionBegin;
10187: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
10188: PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
10189: PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
10190: if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
10191: PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
10192: } else {
10193: PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
10194: }
10195: PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
10196: F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
10197: PetscFunctionReturn(PETSC_SUCCESS);
10198: }
10200: /*@
10201: MatPtAP - Creates the matrix product $C = P^T * A * P$
10203: Neighbor-wise Collective
10205: Input Parameters:
10206: + A - the matrix
10207: . P - the projection matrix
10208: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10209: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10210: if the result is a dense matrix this is irrelevant
10212: Output Parameter:
10213: . C - the product matrix
10215: Level: intermediate
10217: Notes:
10218: C will be created and must be destroyed by the user with `MatDestroy()`.
10220: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10222: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
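Example Usage (an illustrative sketch; `A` and `P` are placeholder matrices with compatible layouts):
.vb
  Mat C;

  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); // C = P^T * A * P; A, P: placeholders
  // ... change the numerical values of A, keeping its nonzero pattern ...
  PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, PETSC_CURRENT, &C));     // recompute C in place
  PetscCall(MatDestroy(&C));
.ve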
10224: Developer Note:
10225: For matrix types without a special implementation, the function falls back to `MatMatMult()` followed by `MatTransposeMatMult()`.
10227: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10228: @*/
10229: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
10230: {
10231: PetscFunctionBegin;
10232: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10233: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10235: if (scall == MAT_INITIAL_MATRIX) {
10236: PetscCall(MatProductCreate(A, P, NULL, C));
10237: PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
10238: PetscCall(MatProductSetAlgorithm(*C, "default"));
10239: PetscCall(MatProductSetFill(*C, fill));
10241: (*C)->product->api_user = PETSC_TRUE;
10242: PetscCall(MatProductSetFromOptions(*C));
10243: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
10244: PetscCall(MatProductSymbolic(*C));
10245: } else { /* scall == MAT_REUSE_MATRIX */
10246: PetscCall(MatProductReplaceMats(A, P, NULL, *C));
10247: }
10249: PetscCall(MatProductNumeric(*C));
10250: (*C)->symmetric = A->symmetric;
10251: (*C)->spd = A->spd;
10252: PetscFunctionReturn(PETSC_SUCCESS);
10253: }
10255: /*@
10256: MatRARt - Creates the matrix product $C = R * A * R^T$
10258: Neighbor-wise Collective
10260: Input Parameters:
10261: + A - the matrix
10262: . R - the projection matrix
10263: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10264: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10265: if the result is a dense matrix this is irrelevant
10267: Output Parameter:
10268: . C - the product matrix
10270: Level: intermediate
10272: Notes:
10273: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10275: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10277: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10278: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10279: parallel `MatRARt()` is implemented via explicit transpose of `R`, which could be very expensive.
10280: We recommend using `MatPtAP()`.
10282: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10284: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10285: @*/
10286: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10287: {
10288: PetscFunctionBegin;
10289: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10290: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10292: if (scall == MAT_INITIAL_MATRIX) {
10293: PetscCall(MatProductCreate(A, R, NULL, C));
10294: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10295: PetscCall(MatProductSetAlgorithm(*C, "default"));
10296: PetscCall(MatProductSetFill(*C, fill));
10298: (*C)->product->api_user = PETSC_TRUE;
10299: PetscCall(MatProductSetFromOptions(*C));
10300: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10301: PetscCall(MatProductSymbolic(*C));
10302: } else { /* scall == MAT_REUSE_MATRIX */
10303: PetscCall(MatProductReplaceMats(A, R, NULL, *C));
10304: }
10306: PetscCall(MatProductNumeric(*C));
10307: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10308: PetscFunctionReturn(PETSC_SUCCESS);
10309: }
10311: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10312: {
10313: PetscBool flg = PETSC_TRUE;
10315: PetscFunctionBegin;
10316: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10317: if (scall == MAT_INITIAL_MATRIX) {
10318: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10319: PetscCall(MatProductCreate(A, B, NULL, C));
10320: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10321: PetscCall(MatProductSetFill(*C, fill));
10322: } else { /* scall == MAT_REUSE_MATRIX */
10323: Mat_Product *product = (*C)->product;
10325: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
10326: if (flg && product && product->type != ptype) {
10327: PetscCall(MatProductClear(*C));
10328: product = NULL;
10329: }
10330: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10331: if (!product) { /* the user provided the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10332: PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10333: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10334: product = (*C)->product;
10335: product->fill = fill;
10336: product->clear = PETSC_TRUE;
10337: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10338: flg = PETSC_FALSE;
10339: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10340: }
10341: }
10342: if (flg) {
10343: (*C)->product->api_user = PETSC_TRUE;
10344: PetscCall(MatProductSetType(*C, ptype));
10345: PetscCall(MatProductSetFromOptions(*C));
10346: PetscCall(MatProductSymbolic(*C));
10347: }
10348: PetscCall(MatProductNumeric(*C));
10349: PetscFunctionReturn(PETSC_SUCCESS);
10350: }
10352: /*@
10353: MatMatMult - Performs matrix-matrix multiplication C=A*B.
10355: Neighbor-wise Collective
10357: Input Parameters:
10358: + A - the left matrix
10359: . B - the right matrix
10360: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10361: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10362: if the result is a dense matrix this is irrelevant
10364: Output Parameter:
10365: . C - the product matrix
10367: Notes:
10368: Unless `scall` is `MAT_REUSE_MATRIX`, `C` will be created.
10370: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10371: call to this function with `MAT_INITIAL_MATRIX`.
10373: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value actually needed.
10375: In the special case where matrix `B` (and hence `C`) are dense you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10376: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10378: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10380: Example of Usage:
10381: .vb
10382: MatProductCreate(A,B,NULL,&C);
10383: MatProductSetType(C,MATPRODUCT_AB);
10384: MatProductSymbolic(C);
10385: MatProductNumeric(C); // compute C=A * B
10386: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10387: MatProductNumeric(C);
10388: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10389: MatProductNumeric(C);
10390: .ve
10392: Level: intermediate
10394: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10395: @*/
10396: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10397: {
10398: PetscFunctionBegin;
10399: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10400: PetscFunctionReturn(PETSC_SUCCESS);
10401: }
10403: /*@
10404: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10406: Neighbor-wise Collective
10408: Input Parameters:
10409: + A - the left matrix
10410: . B - the right matrix
10411: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10412: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10414: Output Parameter:
10415: . C - the product matrix
10417: Options Database Key:
10418: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10419: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10420: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10422: Level: intermediate
10424: Notes:
10425: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10427: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10429: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10430: actually needed.
10432: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10433: and for pairs of `MATMPIDENSE` matrices.
10435: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`
10437: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10439: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10440: @*/
10441: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10442: {
10443: PetscFunctionBegin;
10444: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10445: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10446: PetscFunctionReturn(PETSC_SUCCESS);
10447: }
10449: /*@
10450: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10452: Neighbor-wise Collective
10454: Input Parameters:
10455: + A - the left matrix
10456: . B - the right matrix
10457: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10458: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10460: Output Parameter:
10461: . C - the product matrix
10463: Level: intermediate
10465: Notes:
10466: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10468: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
10470: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`
10472: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10473: actually needed.
10475: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10476: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10478: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
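Example Usage (an illustrative sketch; `A` and `B` are placeholder matrices with the same row layout):
.vb
  Mat C;

  PetscCall(MatTransposeMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); // C = A^T * B; A, B: placeholders
  PetscCall(MatTransposeMatMult(A, B, MAT_REUSE_MATRIX, PETSC_CURRENT, &C));     // reuse the nonzero pattern of C
  PetscCall(MatDestroy(&C));
.ve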
10480: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10481: @*/
10482: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10483: {
10484: PetscFunctionBegin;
10485: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10486: PetscFunctionReturn(PETSC_SUCCESS);
10487: }
10489: /*@
10490: MatMatMatMult - Performs matrix-matrix-matrix multiplication D=A*B*C.
10492: Neighbor-wise Collective
10494: Input Parameters:
10495: + A - the left matrix
10496: . B - the middle matrix
10497: . C - the right matrix
10498: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10499: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10500: if the result is a dense matrix this is irrelevant
10502: Output Parameter:
10503: . D - the product matrix
10505: Level: intermediate
10507: Notes:
10508: Unless `scall` is `MAT_REUSE_MATRIX`, `D` will be created.
10510: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10512: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`
10514: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10515: actually needed.
10517: If you have many matrices with the same non-zero structure to multiply, you
10518: should use `MAT_REUSE_MATRIX` in all calls but the first
10520: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
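Example Usage (an illustrative sketch; `A`, `B`, and `C` are placeholder matrices with compatible sizes):
.vb
  Mat D;

  PetscCall(MatMatMatMult(A, B, C, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &D)); // D = A * B * C; A, B, C: placeholders
  PetscCall(MatMatMatMult(A, B, C, MAT_REUSE_MATRIX, PETSC_CURRENT, &D));     // inputs kept their nonzero structure
  PetscCall(MatDestroy(&D));
.ve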
10522: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10523: @*/
10524: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10525: {
10526: PetscFunctionBegin;
10527: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10528: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10530: if (scall == MAT_INITIAL_MATRIX) {
10531: PetscCall(MatProductCreate(A, B, C, D));
10532: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10533: PetscCall(MatProductSetAlgorithm(*D, "default"));
10534: PetscCall(MatProductSetFill(*D, fill));
10536: (*D)->product->api_user = PETSC_TRUE;
10537: PetscCall(MatProductSetFromOptions(*D));
10538: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10539: ((PetscObject)C)->type_name);
10540: PetscCall(MatProductSymbolic(*D));
10541: } else { /* user may change input matrices when REUSE */
10542: PetscCall(MatProductReplaceMats(A, B, C, *D));
10543: }
10544: PetscCall(MatProductNumeric(*D));
10545: PetscFunctionReturn(PETSC_SUCCESS);
10546: }
10548: /*@
10549: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10551: Collective
10553: Input Parameters:
10554: + mat - the matrix
10555: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10556: . subcomm - MPI communicator split from the communicator in which `mat` resides (or `MPI_COMM_NULL` if `nsubcomm` is used)
10557: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10559: Output Parameter:
10560: . matredundant - redundant matrix
10562: Level: advanced
10564: Notes:
10565: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10566: original matrix has not changed from that last call to `MatCreateRedundantMatrix()`.
10568: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10569: calling it.
10571: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
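Example Usage (an illustrative sketch; `A` is a placeholder parallel matrix, and two subcommunicators are requested):
.vb
  Mat Ared;

  PetscCall(MatCreateRedundantMatrix(A, 2, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &Ared)); // A: placeholder matrix
  // ... each of the two subcommunicators now holds a redundant copy of A ...
  PetscCall(MatDestroy(&Ared));
.ve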
10573: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10574: @*/
10575: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10576: {
10577: MPI_Comm comm;
10578: PetscMPIInt size;
10579: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10580: Mat_Redundant *redund = NULL;
10581: PetscSubcomm psubcomm = NULL;
10582: MPI_Comm subcomm_in = subcomm;
10583: Mat *matseq;
10584: IS isrow, iscol;
10585: PetscBool newsubcomm = PETSC_FALSE;
10587: PetscFunctionBegin;
10589: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10590: PetscAssertPointer(*matredundant, 5);
10592: }
10594: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10595: if (size == 1 || nsubcomm == 1) {
10596: if (reuse == MAT_INITIAL_MATRIX) {
10597: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10598: } else {
10599: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10600: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10601: }
10602: PetscFunctionReturn(PETSC_SUCCESS);
10603: }
10605: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10606: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10607: MatCheckPreallocated(mat, 1);
10609: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10610: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10611: /* create psubcomm, then get subcomm */
10612: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10613: PetscCallMPI(MPI_Comm_size(comm, &size));
10614: PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);
10616: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10617: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10618: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10619: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10620: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10621: newsubcomm = PETSC_TRUE;
10622: PetscCall(PetscSubcommDestroy(&psubcomm));
10623: }
10625: /* get isrow, iscol and a local sequential matrix matseq[0] */
10626: if (reuse == MAT_INITIAL_MATRIX) {
10627: mloc_sub = PETSC_DECIDE;
10628: nloc_sub = PETSC_DECIDE;
10629: if (bs < 1) {
10630: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10631: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10632: } else {
10633: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10634: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10635: }
10636: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10637: rstart = rend - mloc_sub;
10638: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10639: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10640: PetscCall(ISSetIdentity(iscol));
10641: } else { /* reuse == MAT_REUSE_MATRIX */
10642: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10643: /* retrieve subcomm */
10644: PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10645: redund = (*matredundant)->redundant;
10646: isrow = redund->isrow;
10647: iscol = redund->iscol;
10648: matseq = redund->matseq;
10649: }
10650: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10652: /* get matredundant over subcomm */
10653: if (reuse == MAT_INITIAL_MATRIX) {
10654: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10656: /* create a supporting struct and attach it to C for reuse */
10657: PetscCall(PetscNew(&redund));
10658: (*matredundant)->redundant = redund;
10659: redund->isrow = isrow;
10660: redund->iscol = iscol;
10661: redund->matseq = matseq;
10662: if (newsubcomm) {
10663: redund->subcomm = subcomm;
10664: } else {
10665: redund->subcomm = MPI_COMM_NULL;
10666: }
10667: } else {
10668: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10669: }
10670: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10671: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10672: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10673: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10674: }
10675: #endif
10676: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10677: PetscFunctionReturn(PETSC_SUCCESS);
10678: }
10680: /*@C
10681: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10682: a given `Mat`. Each submatrix can span multiple procs.
10684: Collective
10686: Input Parameters:
10687: + mat - the matrix
10688: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10689: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10691: Output Parameter:
10692: . subMat - parallel sub-matrices each spanning a given `subcomm`
10694: Level: advanced
10696: Notes:
10697: The submatrix partition across processors is dictated by `subComm`, a
10698: communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10699: is not restricted to be grouped with consecutive original MPI processes.
10701: Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10702: maps directly to the layout of the original matrix (with respect to the local
10703: row and column partitioning). So the original 'DiagonalMat' naturally maps
10704: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10705: the `subMat`. However, the offDiagMat loses some columns, and these are
10706: reconstructed with `MatSetValues()`.
10708: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
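Example Usage (an illustrative sketch; `mat` is a placeholder parallel matrix and the processes are split into two groups by rank parity):
.vb
  Mat         subMat;
  MPI_Comm    subComm;
  PetscMPIInt rank;

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank)); // mat: placeholder parallel matrix
  PetscCallMPI(MPI_Comm_split(PetscObjectComm((PetscObject)mat), rank % 2, rank, &subComm));
  PetscCall(MatGetMultiProcBlock(mat, subComm, MAT_INITIAL_MATRIX, &subMat));
  PetscCall(MatDestroy(&subMat));
  PetscCallMPI(MPI_Comm_free(&subComm));
.ve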
10710: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10711: @*/
10712: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10713: {
10714: PetscMPIInt commsize, subCommSize;
10716: PetscFunctionBegin;
10717: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10718: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10719: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize);
10721: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10722: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10723: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10724: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10725: PetscFunctionReturn(PETSC_SUCCESS);
10726: }
10728: /*@
10729: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10731: Not Collective
10733: Input Parameters:
10734: + mat - matrix to extract local submatrix from
10735: . isrow - local row indices for submatrix
10736: - iscol - local column indices for submatrix
10738: Output Parameter:
10739: . submat - the submatrix
10741: Level: intermediate
10743: Notes:
10744: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10746: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10747: the same as `mat`, it may be `PETSC_COMM_SELF`, or some other sub-communicator of `mat`'s.
10749: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10750: `MatSetValuesBlockedLocal()` will also be implemented.
10752: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10753: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
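Example Usage (an illustrative sketch; `mat`, `isrow`, and `iscol` are placeholders, and `mat` is assumed to already have a local-to-global mapping):
.vb
  Mat         sub;
  PetscInt    row = 0, col = 0; // local indices within the submatrix
  PetscScalar v   = 1.0;

  PetscCall(MatGetLocalSubMatrix(mat, isrow, iscol, &sub)); // mat, isrow, iscol: placeholders created elsewhere
  PetscCall(MatSetValuesLocal(sub, 1, &row, 1, &col, &v, ADD_VALUES));
  PetscCall(MatRestoreLocalSubMatrix(mat, isrow, iscol, &sub));
  // assemble mat afterwards as usual
.ve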
10755: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10756: @*/
10757: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10758: {
10759: PetscFunctionBegin;
10763: PetscCheckSameComm(isrow, 2, iscol, 3);
10764: PetscAssertPointer(submat, 4);
10765: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10767: if (mat->ops->getlocalsubmatrix) {
10768: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10769: } else {
10770: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10771: }
10772: PetscFunctionReturn(PETSC_SUCCESS);
10773: }
10775: /*@
10776: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10778: Not Collective
10780: Input Parameters:
10781: + mat - matrix to extract local submatrix from
10782: . isrow - local row indices for submatrix
10783: . iscol - local column indices for submatrix
10784: - submat - the submatrix
10786: Level: intermediate
10788: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10789: @*/
10790: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10791: {
10792: PetscFunctionBegin;
10796: PetscCheckSameComm(isrow, 2, iscol, 3);
10797: PetscAssertPointer(submat, 4);
10800: if (mat->ops->restorelocalsubmatrix) {
10801: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10802: } else {
10803: PetscCall(MatDestroy(submat));
10804: }
10805: *submat = NULL;
10806: PetscFunctionReturn(PETSC_SUCCESS);
10807: }
10809: /*@
10810: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10812: Collective
10814: Input Parameter:
10815: . mat - the matrix
10817: Output Parameter:
10818: . is - if any rows have zero diagonals this contains the list of them
10820: Level: developer
10822: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10823: @*/
10824: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10825: {
10826: PetscFunctionBegin;
10829: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10830: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10832: if (!mat->ops->findzerodiagonals) {
10833: Vec diag;
10834: const PetscScalar *a;
10835: PetscInt *rows;
10836: PetscInt rStart, rEnd, r, nrow = 0;
10838: PetscCall(MatCreateVecs(mat, &diag, NULL));
10839: PetscCall(MatGetDiagonal(mat, diag));
10840: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10841: PetscCall(VecGetArrayRead(diag, &a));
10842: for (r = 0; r < rEnd - rStart; ++r)
10843: if (a[r] == 0.0) ++nrow;
10844: PetscCall(PetscMalloc1(nrow, &rows));
10845: nrow = 0;
10846: for (r = 0; r < rEnd - rStart; ++r)
10847: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10848: PetscCall(VecRestoreArrayRead(diag, &a));
10849: PetscCall(VecDestroy(&diag));
10850: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10851: } else {
10852: PetscUseTypeMethod(mat, findzerodiagonals, is);
10853: }
10854: PetscFunctionReturn(PETSC_SUCCESS);
10855: }
10857: /*@
10858: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10860: Collective
10862: Input Parameter:
10863: . mat - the matrix
10865: Output Parameter:
10866: . is - contains the list of rows with off block diagonal entries
10868: Level: developer
10870: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10871: @*/
10872: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10873: {
10874: PetscFunctionBegin;
10877: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10878: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10880: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10881: PetscFunctionReturn(PETSC_SUCCESS);
10882: }
10884: /*@C
10885: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10887: Collective; No Fortran Support
10889: Input Parameter:
10890: . mat - the matrix
10892: Output Parameter:
10893: . values - the block inverses in column major order (FORTRAN-like)
10895: Level: advanced
10897: Notes:
10898: The size of the blocks is determined by the block size of the matrix.
10900: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10902: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
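Example Usage (an illustrative sketch; `mat` is a placeholder assembled matrix whose block size evenly divides its local row count):
.vb
  const PetscScalar *vals;
  PetscInt           bs, m;

  PetscCall(MatGetBlockSize(mat, &bs));       // mat: placeholder assembled matrix
  PetscCall(MatGetLocalSize(mat, &m, NULL));
  PetscCall(MatInvertBlockDiagonal(mat, &vals));
  // vals holds m/bs inverted blocks, each bs*bs and stored in column-major order
.ve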
10904: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10905: @*/
10906: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
10907: {
10908: PetscFunctionBegin;
10910: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10911: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10912: PetscUseTypeMethod(mat, invertblockdiagonal, values);
10913: PetscFunctionReturn(PETSC_SUCCESS);
10914: }
10916: /*@
10917: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10919: Collective; No Fortran Support
10921: Input Parameters:
10922: + mat - the matrix
10923: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10924: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10926: Output Parameter:
10927: . values - the block inverses in column major order (FORTRAN-like)
10929: Level: advanced
10931: Notes:
10932: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10934: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10936: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10937: @*/
10938: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
10939: {
10940: PetscFunctionBegin;
10942: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10943: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10944: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10945: PetscFunctionReturn(PETSC_SUCCESS);
10946: }
10948: /*@
10949: MatInvertBlockDiagonalMat - Sets the values of the matrix `C` to be the inverted block diagonal of the matrix `A`
10951: Collective
10953: Input Parameters:
10954: + A - the matrix
10955: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10957: Level: advanced
10959: Note:
10960: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10962: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10963: @*/
10964: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10965: {
10966: const PetscScalar *vals;
10967: PetscInt *dnnz;
10968: PetscInt m, rstart, rend, bs, i, j;
10970: PetscFunctionBegin;
10971: PetscCall(MatInvertBlockDiagonal(A, &vals));
10972: PetscCall(MatGetBlockSize(A, &bs));
10973: PetscCall(MatGetLocalSize(A, &m, NULL));
10974: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10975: PetscCall(PetscMalloc1(m / bs, &dnnz));
10976: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10977: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10978: PetscCall(PetscFree(dnnz));
10979: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10980: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10981: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10982: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10983: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10984: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10985: PetscFunctionReturn(PETSC_SUCCESS);
10986: }
10988: /*@
10989: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10990: via `MatTransposeColoringCreate()`.
10992: Collective
10994: Input Parameter:
10995: . c - coloring context
10997: Level: intermediate
10999: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
11000: @*/
11001: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
11002: {
11003: MatTransposeColoring matcolor = *c;
11005: PetscFunctionBegin;
11006: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
11007: if (--((PetscObject)matcolor)->refct > 0) {
11008: matcolor = NULL;
11009: PetscFunctionReturn(PETSC_SUCCESS);
11010: }
11012: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
11013: PetscCall(PetscFree(matcolor->rows));
11014: PetscCall(PetscFree(matcolor->den2sp));
11015: PetscCall(PetscFree(matcolor->colorforcol));
11016: PetscCall(PetscFree(matcolor->columns));
11017: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
11018: PetscCall(PetscHeaderDestroy(c));
11019: PetscFunctionReturn(PETSC_SUCCESS);
11020: }
11022: /*@
11023: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
11024: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
11025: `MatTransposeColoring` to sparse `B`.
11027: Collective
11029: Input Parameters:
11030: + coloring - coloring context created with `MatTransposeColoringCreate()`
11031: - B - sparse matrix
11033: Output Parameter:
11034: . Btdense - dense matrix $B^T$
11036: Level: developer
11038: Note:
11039: These are used internally for some implementations of `MatRARt()`
11041: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
11042: @*/
11043: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
11044: {
11045: PetscFunctionBegin;
11050: PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
11051: PetscFunctionReturn(PETSC_SUCCESS);
11052: }
11054: /*@
11055: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
11056: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
11057: in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recovers the sparse matrix
11058: $C_{sp}$ from $C_{den}$.
11060: Collective
11062: Input Parameters:
11063: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
11064: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
11066: Output Parameter:
11067: . Csp - sparse matrix
11069: Level: developer
11071: Note:
11072: These are used internally for some implementations of `MatRARt()`
11074: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
11075: @*/
11076: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
11077: {
11078: PetscFunctionBegin;
11083: PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
11084: PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
11085: PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
11086: PetscFunctionReturn(PETSC_SUCCESS);
11087: }
11089: /*@
11090: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
11092: Collective
11094: Input Parameters:
11095: + mat - the matrix product C
11096: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
11098: Output Parameter:
11099: . color - the new coloring context
11101: Level: intermediate
11103: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
11104: `MatTransColoringApplyDenToSp()`
11105: @*/
11106: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
11107: {
11108: MatTransposeColoring c;
11109: MPI_Comm comm;
11111: PetscFunctionBegin;
11112: PetscAssertPointer(color, 3);
11114: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
11115: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
11116: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
11117: c->ctype = iscoloring->ctype;
11118: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
11119: *color = c;
11120: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
11121: PetscFunctionReturn(PETSC_SUCCESS);
11122: }
11124: /*@
11125: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
11126: matrix has had new nonzero locations added to (or removed from) the matrix since the previous call, the value will be larger.
11128: Not Collective
11130: Input Parameter:
11131: . mat - the matrix
11133: Output Parameter:
11134: . state - the current state
11136: Level: intermediate
11138: Notes:
11139: You can only compare states from two different calls to the SAME matrix, you cannot compare calls between
11140: different matrices
11142: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
11144: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
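Example Usage (an illustrative sketch; `cached_state` is a hypothetical variable kept by the caller to detect nonzero-pattern changes):
.vb
  PetscObjectState state;

  PetscCall(MatGetNonzeroState(mat, &state));
  if (state != cached_state) { // cached_state: hypothetical caller-owned variable
    // the nonzero pattern changed since the last check: rebuild pattern-dependent data here
    cached_state = state;
  }
.ve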
11146: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
11147: @*/
11148: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
11149: {
11150: PetscFunctionBegin;
11152: *state = mat->nonzerostate;
11153: PetscFunctionReturn(PETSC_SUCCESS);
11154: }
11156: /*@
11157: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
11158: matrices from each processor
11160: Collective
11162: Input Parameters:
11163: + comm - the communicator the parallel matrix will live on
11164: . seqmat - the input sequential matrix on each MPI process
11165: . n - number of local columns (or `PETSC_DECIDE`)
11166: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11168: Output Parameter:
11169: . mpimat - the parallel matrix generated
11171: Level: developer
11173: Note:
11174: The number of columns of the matrix in EACH processor MUST be the same.
11176: .seealso: [](ch_matrices), `Mat`
11177: @*/
11178: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11179: {
11180: PetscMPIInt size;
11182: PetscFunctionBegin;
11183: PetscCallMPI(MPI_Comm_size(comm, &size));
11184: if (size == 1) {
11185: if (reuse == MAT_INITIAL_MATRIX) {
11186: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11187: } else {
11188: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11189: }
11190: PetscFunctionReturn(PETSC_SUCCESS);
11191: }
11193: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
11195: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11196: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11197: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11198: PetscFunctionReturn(PETSC_SUCCESS);
11199: }
11201: /*@
11202: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
11204: Collective
11206: Input Parameters:
11207: + A - the matrix to create subdomains from
11208: - N - requested number of subdomains
11210: Output Parameters:
11211: + n - number of subdomains resulting on this MPI process
11212: - iss - `IS` list with indices of subdomains on this MPI process
11214: Level: advanced
11216: Note:
11217: The number of subdomains must be smaller than the communicator size
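Example Usage (an illustrative sketch; `A` is a placeholder parallel matrix, two subdomains are requested, and the communicator of `A` is assumed to have more than two processes):
.vb
  PetscInt n, i;
  IS      *iss;

  PetscCall(MatSubdomainsCreateCoalesce(A, 2, &n, &iss)); // A: placeholder parallel matrix
  // ... use the n index sets owned by this process ...
  for (i = 0; i < n; i++) PetscCall(ISDestroy(&iss[i]));
  PetscCall(PetscFree(iss));
.ve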
11219: .seealso: [](ch_matrices), `Mat`, `IS`
11220: @*/
11221: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11222: {
11223: MPI_Comm comm, subcomm;
11224: PetscMPIInt size, rank, color;
11225: PetscInt rstart, rend, k;
11227: PetscFunctionBegin;
11228: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11229: PetscCallMPI(MPI_Comm_size(comm, &size));
11230: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11231: PetscCheck(N >= 1 && N < size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11232: *n = 1;
11233: k = size / N + (size % N > 0); /* There are up to k ranks to a color */
11234: color = rank / k;
11235: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11236: PetscCall(PetscMalloc1(1, iss));
11237: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11238: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11239: PetscCallMPI(MPI_Comm_free(&subcomm));
11240: PetscFunctionReturn(PETSC_SUCCESS);
11241: }
11243: /*@
11244: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11246: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11247: If they are not the same, uses `MatMatMatMult()`.
11249: Once the coarse grid problem is constructed, correct for interpolation operators
11250: that are not of full rank, which can legitimately happen in the case of non-nested
11251: geometric multigrid.
11253: Input Parameters:
11254: + restrct - restriction operator
11255: . dA - fine grid matrix
11256: . interpolate - interpolation operator
11257: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11258: - fill - expected fill, use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
11260: Output Parameter:
11261: . A - the Galerkin coarse matrix
11263: Options Database Key:
11264: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
11266: Level: developer
11268: Note:
11269: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
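Example Usage (an illustrative sketch; `R`, `Afine`, and `P` are placeholder restriction, fine-grid, and interpolation matrices):
.vb
  Mat Acoarse;

  // R, Afine, P: placeholders; computes Acoarse = R * Afine * P (or P^T * Afine * P when R == P)
  PetscCall(MatGalerkin(R, Afine, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Acoarse));
  PetscCall(MatDestroy(&Acoarse));
.ve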
11271: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11272: @*/
11273: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11274: {
11275: IS zerorows;
11276: Vec diag;
11278: PetscFunctionBegin;
11279: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)restrct), PETSC_ERR_SUP, "Inplace product not supported");
11280: /* Construct the coarse grid matrix */
11281: if (interpolate == restrct) {
11282: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11283: } else {
11284: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11285: }
11287: /* If the interpolation matrix is not of full rank, A will have zero rows.
11288: This can legitimately happen in the case of non-nested geometric multigrid.
11289: In that event, we set the rows of the matrix to the rows of the identity,
11290: ignoring the equations (as the RHS will also be zero). */
11292: PetscCall(MatFindZeroRows(*A, &zerorows));
11294: if (zerorows != NULL) { /* if there are any zero rows */
11295: PetscCall(MatCreateVecs(*A, &diag, NULL));
11296: PetscCall(MatGetDiagonal(*A, diag));
11297: PetscCall(VecISSet(diag, zerorows, 1.0));
11298: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11299: PetscCall(VecDestroy(&diag));
11300: PetscCall(ISDestroy(&zerorows));
11301: }
11302: PetscFunctionReturn(PETSC_SUCCESS);
11303: }
11305: /*@C
11306: MatSetOperation - Allows user to set a matrix operation for any matrix type
11308: Logically Collective
11310: Input Parameters:
11311: + mat - the matrix
11312: . op - the name of the operation
11313: - f - the function that provides the operation
11315: Level: developer
11317: Example Usage:
11318: .vb
11319: extern PetscErrorCode usermult(Mat, Vec, Vec);
11321: PetscCall(MatCreateXXX(comm, ..., &A));
11322: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFn *)usermult));
11323: .ve
11325: Notes:
11326: See the file `include/petscmat.h` for a complete list of matrix
11327: operations, which all have the form MATOP_<OPERATION>, where
11328: <OPERATION> is the name (in all capital letters) of the
11329: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11331: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11332: sequence as the usual matrix interface routines, since they
11333: are intended to be accessed via the usual matrix interface
11334: routines, e.g.,
11335: .vb
11336: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11337: .ve
11339: In particular each function MUST return `PETSC_SUCCESS` on success and
11340: nonzero on failure.
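
  For example, a minimal sketch of a user-provided multiply routine (the name `usermult` and its body are illustrative) might look like
.vb
   PetscErrorCode usermult(Mat A, Vec x, Vec y)
   {
     PetscFunctionBegin;
     // ... compute y = A x from data attached to A ...
     PetscFunctionReturn(PETSC_SUCCESS);
   }
.ve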
11342: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11344: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11345: @*/
11346: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, void (*f)(void))
11347: {
11348: PetscFunctionBegin;
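  /* the first time MATOP_VIEW is replaced, remember the matrix type's built-in view routine so it stays reachable through mat->ops->viewnative */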
11350: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))mat->ops->view) mat->ops->viewnative = mat->ops->view;
11351: (((void (**)(void))mat->ops)[op]) = f;
11352: PetscFunctionReturn(PETSC_SUCCESS);
11353: }
11355: /*@C
11356: MatGetOperation - Gets a matrix operation for any matrix type.
11358: Not Collective
11360: Input Parameters:
11361: + mat - the matrix
11362: - op - the name of the operation
11364: Output Parameter:
11365: . f - the function that provides the operation
11367: Level: developer
11369: Example Usage:
11370: .vb
11371: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11373: MatGetOperation(A, MATOP_MULT, (void (**)(void))&usermult);
11374: .ve
11376: Notes:
11377: See the file `include/petscmat.h` for a complete list of matrix
11378: operations, which all have the form MATOP_<OPERATION>, where
11379: <OPERATION> is the name (in all capital letters) of the
11380: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11382: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
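
  For instance, a minimal sketch that saves the current multiply routine before installing a replacement (the names `oldmult` and `mymult` are illustrative):
.vb
   PetscErrorCode (*oldmult)(Mat, Vec, Vec);

   PetscCall(MatGetOperation(A, MATOP_MULT, (void (**)(void))&oldmult));
   PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFn *)mymult));
.ve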
11384: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11385: @*/
11386: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, void (**f)(void))
11387: {
11388: PetscFunctionBegin;
11390: *f = (((void (**)(void))mat->ops)[op]);
11391: PetscFunctionReturn(PETSC_SUCCESS);
11392: }
11394: /*@
11395: MatHasOperation - Determines whether the given matrix supports the particular operation.
11397: Not Collective
11399: Input Parameters:
11400: + mat - the matrix
11401: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11403: Output Parameter:
11404: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11406: Level: advanced
11408: Note:
11409: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
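
  Example Usage:
  A minimal sketch that only calls `MatGetDiagonal()` when the matrix type provides it (the vector `diag` is assumed to exist already):
.vb
   PetscBool has;

   PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL, &has));
   if (has) PetscCall(MatGetDiagonal(A, diag));
.ve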
11411: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11412: @*/
11413: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11414: {
11415: PetscFunctionBegin;
11417: PetscAssertPointer(has, 3);
11418: if (mat->ops->hasoperation) {
11419: PetscUseTypeMethod(mat, hasoperation, op, has);
11420: } else {
11421: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11422: else {
11423: *has = PETSC_FALSE;
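        /* special case: on a single process MatCreateSubMatrices() can stand in for MatCreateSubMatrix(), so check it before reporting the operation as unavailable */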
11424: if (op == MATOP_CREATE_SUBMATRIX) {
11425: PetscMPIInt size;
11427: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11428: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11429: }
11430: }
11431: }
11432: PetscFunctionReturn(PETSC_SUCCESS);
11433: }
11435: /*@
11436: MatHasCongruentLayouts - Determines whether the row and column layouts of the matrix are congruent
11438: Collective
11440: Input Parameter:
11441: . mat - the matrix
11443: Output Parameter:
11444: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11446: Level: beginner
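
  Example Usage:
  A minimal sketch (the matrix `A` is illustrative):
.vb
   PetscBool cong;

   PetscCall(MatHasCongruentLayouts(A, &cong));
   if (cong) {
     // row and column layouts match, so left and right vectors from MatCreateVecs() share the same parallel layout
   }
.ve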
11448: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11449: @*/
11450: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11451: {
11452: PetscFunctionBegin;
11455: PetscAssertPointer(cong, 2);
11456: if (!mat->rmap || !mat->cmap) {
11457: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11458: PetscFunctionReturn(PETSC_SUCCESS);
11459: }
11460: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11461: PetscCall(PetscLayoutSetUp(mat->rmap));
11462: PetscCall(PetscLayoutSetUp(mat->cmap));
11463: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11464: if (*cong) mat->congruentlayouts = 1;
11465: else mat->congruentlayouts = 0;
11466: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11467: PetscFunctionReturn(PETSC_SUCCESS);
11468: }
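
/* MatSetInf - no manual page; simply dispatches to the matrix type's setinf implementation */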
11470: PetscErrorCode MatSetInf(Mat A)
11471: {
11472: PetscFunctionBegin;
11473: PetscUseTypeMethod(A, setinf);
11474: PetscFunctionReturn(PETSC_SUCCESS);
11475: }
11477: /*@
11478: MatCreateGraph - creates a scalar matrix (that is, a matrix with one vertex for each block vertex in the original matrix) for use in graph algorithms,
11479: and possibly removes small values from the graph structure.
11481: Collective
11483: Input Parameters:
11484: + A - the matrix
11485: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11486: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11487: . filter - filter value; < 0: does nothing; == 0: removes only exact 0.0 entries; > 0: removes entries with absolute value <= filter
11488: . num_idx - size of 'index' array
11489: - index - array of block indices to use when computing the graph strength-of-connection weights
11491: Output Parameter:
11492: . graph - the resulting graph
11494: Level: advanced
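
  Example Usage:
  A minimal sketch; the block index array and the filter value are illustrative placeholders:
.vb
   Mat      graph;
   PetscInt idx[] = {0};

   // symmetrize and scale the graph, dropping entries whose magnitude is at most 1e-4
   PetscCall(MatCreateGraph(A, PETSC_TRUE, PETSC_TRUE, 1.e-4, 1, idx, &graph));
   // ... use graph, for example as input to a coarsening algorithm ...
   PetscCall(MatDestroy(&graph));
.ve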
11496: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11497: @*/
11498: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
11499: {
11500: PetscFunctionBegin;
11504: PetscAssertPointer(graph, 7);
11505: PetscCall(PetscLogEventBegin(MAT_CreateGraph, A, 0, 0, 0));
11506: PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
11507: PetscCall(PetscLogEventEnd(MAT_CreateGraph, A, 0, 0, 0));
11508: PetscFunctionReturn(PETSC_SUCCESS);
11509: }
11511: /*@
11512: MatEliminateZeros - eliminates the nondiagonal zero entries from the nonzero structure of a sparse `Mat` in place,
11513: meaning the same memory is used for the matrix and no new memory is allocated.
11515: Collective
11517: Input Parameters:
11518: + A - the matrix
11519: - keep - when the diagonal coefficient of a given row of `A` is zero, indicates whether it should be left in the structure or eliminated as well
11521: Level: intermediate
11523: Developer Note:
11524: The entries in the sparse matrix data structure are shifted to fill in the locations freed by the eliminated zeros. Thus the tail
11525: ends of the arrays in the data structure are left unused.
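
  Example Usage:
  A minimal sketch; with `keep = PETSC_TRUE`, zero diagonal entries stay in the nonzero structure:
.vb
   PetscCall(MatEliminateZeros(A, PETSC_TRUE));
.ve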
11527: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11528: @*/
11529: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11530: {
11531: PetscFunctionBegin;
11533: PetscUseTypeMethod(A, eliminatezeros, keep);
11534: PetscFunctionReturn(PETSC_SUCCESS);
11535: }