Actual source code: tfs.c
/*
   Provides an interface to the Tufo-Fischer parallel direct solver
*/

#include <petsc/private/pcimpl.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/ksp/pc/impls/tfs/tfs.h>

typedef struct {
  xxt_ADT  xxt;       /* XXT factorization context (symmetric case) */
  xyt_ADT  xyt;       /* XYT factorization context (nonsymmetric case) */
  Vec      b, xd, xo; /* work vectors that wrap the raw arrays passed in by XXT/XYT */
  PetscInt nd;        /* number of local columns of the diagonal block */
} PC_TFS;

static PetscErrorCode PCDestroy_TFS(PC pc)
{
  PC_TFS *tfs = (PC_TFS *)pc->data;

  PetscFunctionBegin;
  /* free the XXT/XYT data structures */
  if (tfs->xxt) PetscCall(XXT_free(tfs->xxt));
  if (tfs->xyt) PetscCall(XYT_free(tfs->xyt));
  PetscCall(VecDestroy(&tfs->b));
  PetscCall(VecDestroy(&tfs->xd));
  PetscCall(VecDestroy(&tfs->xo));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}
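
/*
   Apply the preconditioner using the XXT factorization: computes y = A^{-1} x by calling
   XXT_solve() on the raw arrays of the input and output vectors.
*/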
static PetscErrorCode PCApply_TFS_XXT(PC pc, Vec x, Vec y)
{
  PC_TFS            *tfs = (PC_TFS *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xx));
  PetscCall(VecGetArray(y, &yy));
  PetscCall(XXT_solve(tfs->xxt, yy, (PetscScalar *)xx));
  PetscCall(VecRestoreArrayRead(x, &xx));
  PetscCall(VecRestoreArray(y, &yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_TFS_XYT(PC pc, Vec x, Vec y)
{
  PC_TFS            *tfs = (PC_TFS *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xx));
  PetscCall(VecGetArray(y, &yy));
  PetscCall(XYT_solve(tfs->xyt, yy, (PetscScalar *)xx));
  PetscCall(VecRestoreArrayRead(x, &xx));
  PetscCall(VecRestoreArray(y, &yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
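
/*
   Local matrix-vector product callback handed to XXT/XYT: xin holds the local (diagonal-block)
   entries followed by the ghost entries, so the product is formed as A*xd + B*xo using the
   diagonal block A and the off-diagonal block B of the MPIAIJ matrix.
*/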
static PetscErrorCode PCTFSLocalMult_TFS(PC pc, PetscScalar *xin, PetscScalar *xout)
{
  PC_TFS     *tfs = (PC_TFS *)pc->data;
  Mat         A   = pc->pmat;
  Mat_MPIAIJ *a   = (Mat_MPIAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(VecPlaceArray(tfs->b, xout));
  PetscCall(VecPlaceArray(tfs->xd, xin));
  PetscCall(VecPlaceArray(tfs->xo, xin + tfs->nd));
  PetscCall(MatMult(a->A, tfs->xd, tfs->b));
  PetscCall(MatMultAdd(a->B, tfs->xo, tfs->b, tfs->b));
  PetscCall(VecResetArray(tfs->b));
  PetscCall(VecResetArray(tfs->xd));
  PetscCall(VecResetArray(tfs->xo));
  PetscFunctionReturn(PETSC_SUCCESS);
}
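
/*
   Set up the preconditioner: builds the 1-based local-to-global column map expected by XXT/XYT,
   creates the work vectors used by the local matrix-vector product, and factors the matrix with
   XXT (symmetric case) or XYT (nonsymmetric case).
*/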
static PetscErrorCode PCSetUp_TFS(PC pc)
{
  PC_TFS     *tfs = (PC_TFS *)pc->data;
  Mat         A   = pc->pmat;
  Mat_MPIAIJ *a   = (Mat_MPIAIJ *)A->data;
  PetscInt   *localtoglobal, ncol, i;
  PetscBool   ismpiaij;
  /*
  PetscBool issymmetric;
  PetscReal tol = 0.0;
  */

  PetscFunctionBegin;
  PetscCheck(A->cmap->N == A->rmap->N, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_SIZ, "matrix must be square");
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPIAIJ, &ismpiaij));
  PetscCheck(ismpiaij, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Currently only supports MPIAIJ matrices");

  /* generate the local to global mapping */
  ncol = a->A->cmap->n + a->B->cmap->n;
  PetscCall(PetscMalloc1(ncol, &localtoglobal));
  for (i = 0; i < a->A->cmap->n; i++) localtoglobal[i] = A->cmap->rstart + i + 1;
  for (i = 0; i < a->B->cmap->n; i++) localtoglobal[i + a->A->cmap->n] = a->garray[i] + 1;

  /* generate the vectors needed for the local solves */
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->A->rmap->n, NULL, &tfs->b));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->A->cmap->n, NULL, &tfs->xd));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->B->cmap->n, NULL, &tfs->xo));
  tfs->nd = a->A->cmap->n;

  /* PetscCall(MatIsSymmetric(A, tol, &issymmetric)); */
  /* if (issymmetric) { */
  PetscCall(PetscBarrier((PetscObject)pc));
  if (A->symmetric == PETSC_BOOL3_TRUE) {
    tfs->xxt = XXT_new();
    PetscCall(XXT_factor(tfs->xxt, localtoglobal, A->rmap->n, ncol, (PetscErrorCode (*)(void *, PetscScalar *, PetscScalar *))PCTFSLocalMult_TFS, pc));
    pc->ops->apply = PCApply_TFS_XXT;
  } else {
    tfs->xyt = XYT_new();
    PetscCall(XYT_factor(tfs->xyt, localtoglobal, A->rmap->n, ncol, (PetscErrorCode (*)(void *, PetscScalar *, PetscScalar *))PCTFSLocalMult_TFS, pc));
    pc->ops->apply = PCApply_TFS_XYT;
  }

  PetscCall(PetscFree(localtoglobal));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   PCTFS - A parallel direct solver intended for problems with very few unknowns (such as the
   coarse grid in multigrid). It performs a Cholesky (XXT) or LU (XYT) factorization of a matrix
   defined by its local matrix-vector product.

   Level: beginner

   Notes:
   Only implemented for `MATMPIAIJ` matrices

   Only works on a solver object that lives on all of `PETSC_COMM_WORLD`!

   Only works for real numbers (it is not built if `PetscScalar` is complex)

   Originally implemented by Henry M. Tufo III and Paul Fischer for Nek5000, where it is known as XXT or XYT

.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`
M*/
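
/*
   Example usage (a minimal sketch, not part of this implementation): assuming a square MATMPIAIJ
   matrix A and vectors b, x already assembled on PETSC_COMM_WORLD, the solver can be selected
   programmatically as below, or from the command line with -pc_type tfs.

     KSP ksp;
     PC  pc;
     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCTFS));
     PetscCall(KSPSetFromOptions(ksp));
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPDestroy(&ksp));
*/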
PETSC_EXTERN PetscErrorCode PCCreate_TFS(PC pc)
{
  PC_TFS     *tfs;
  PetscMPIInt cmp;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_compare(PETSC_COMM_WORLD, PetscObjectComm((PetscObject)pc), &cmp));
  PetscCheck(cmp == MPI_IDENT || cmp == MPI_CONGRUENT, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "TFS only works with PETSC_COMM_WORLD objects");
  PetscCall(PetscNew(&tfs));

  tfs->xxt = NULL;
  tfs->xyt = NULL;
  tfs->b   = NULL;
  tfs->xd  = NULL;
  tfs->xo  = NULL;
  tfs->nd  = 0;

  pc->ops->apply               = NULL;
  pc->ops->applytranspose      = NULL;
  pc->ops->setup               = PCSetUp_TFS;
  pc->ops->destroy             = PCDestroy_TFS;
  pc->ops->applyrichardson     = NULL;
  pc->ops->applysymmetricleft  = NULL;
  pc->ops->applysymmetricright = NULL;
  pc->data                     = (void *)tfs;
  PetscFunctionReturn(PETSC_SUCCESS);
}