Actual source code: tfs.c

/*
        Provides an interface to the Tufo-Fischer parallel direct solver
*/
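
/*
   PCSetUp_TFS() below builds an XXT factorization when the matrix is marked
   symmetric and an XYT factorization otherwise; both are constructed from the
   matrix's local matrix-vector product, supplied through the callback
   PCTFSLocalMult_TFS().
*/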

#include <petsc/private/pcimpl.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/ksp/pc/impls/tfs/tfs.h>

typedef struct {
  xxt_ADT  xxt;       /* XXT factorization context, used when the matrix is marked symmetric */
  xyt_ADT  xyt;       /* XYT factorization context, used otherwise */
  Vec      b, xd, xo; /* work vectors that wrap raw arrays during the local matrix-vector product */
  PetscInt nd;        /* number of local columns in the diagonal block of the MPIAIJ matrix */
} PC_TFS;

static PetscErrorCode PCDestroy_TFS(PC pc)
{
  PC_TFS *tfs = (PC_TFS *)pc->data;

  PetscFunctionBegin;
  /* free the XXT and XYT data structures */
  if (tfs->xxt) PetscCall(XXT_free(tfs->xxt));
  if (tfs->xyt) PetscCall(XYT_free(tfs->xyt));
  PetscCall(VecDestroy(&tfs->b));
  PetscCall(VecDestroy(&tfs->xd));
  PetscCall(VecDestroy(&tfs->xo));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_TFS_XXT(PC pc, Vec x, Vec y)
{
  PC_TFS            *tfs = (PC_TFS *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xx));
  PetscCall(VecGetArray(y, &yy));
  PetscCall(XXT_solve(tfs->xxt, yy, (PetscScalar *)xx));
  PetscCall(VecRestoreArrayRead(x, &xx));
  PetscCall(VecRestoreArray(y, &yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_TFS_XYT(PC pc, Vec x, Vec y)
{
  PC_TFS            *tfs = (PC_TFS *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xx));
  PetscCall(VecGetArray(y, &yy));
  PetscCall(XYT_solve(tfs->xyt, yy, (PetscScalar *)xx));
  PetscCall(VecRestoreArrayRead(x, &xx));
  PetscCall(VecRestoreArray(y, &yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

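/*
   Local matrix-vector product handed to XXT_factor()/XYT_factor(): the input
   array xin holds the locally owned (diagonal-block) entries first, tfs->nd of
   them, followed by the off-process (ghost) entries, so that
   xout = A_diag * x_owned + A_offdiag * x_ghost.
*/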
static PetscErrorCode PCTFSLocalMult_TFS(PC pc, PetscScalar *xin, PetscScalar *xout)
{
  PC_TFS     *tfs = (PC_TFS *)pc->data;
  Mat         A   = pc->pmat;
  Mat_MPIAIJ *a   = (Mat_MPIAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(VecPlaceArray(tfs->b, xout));
  PetscCall(VecPlaceArray(tfs->xd, xin));
  PetscCall(VecPlaceArray(tfs->xo, xin + tfs->nd));
  PetscCall(MatMult(a->A, tfs->xd, tfs->b));
  PetscCall(MatMultAdd(a->B, tfs->xo, tfs->b, tfs->b));
  PetscCall(VecResetArray(tfs->b));
  PetscCall(VecResetArray(tfs->xd));
  PetscCall(VecResetArray(tfs->xo));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetUp_TFS(PC pc)
{
  PC_TFS     *tfs = (PC_TFS *)pc->data;
  Mat         A   = pc->pmat;
  Mat_MPIAIJ *a   = (Mat_MPIAIJ *)A->data;
  PetscInt   *localtoglobal, ncol, i;
  PetscBool   ismpiaij;

  /*
  PetscBool      issymmetric;
  PetscReal      tol = 0.0;
  */

  PetscFunctionBegin;
  PetscCheck(A->cmap->N == A->rmap->N, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_SIZ, "matrix must be square");
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPIAIJ, &ismpiaij));
  PetscCheck(ismpiaij, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Currently only supports MPIAIJ matrices");

  /* generate the local to global mapping; the TFS library numbers global unknowns starting at 1, hence the + 1 */
  ncol = a->A->cmap->n + a->B->cmap->n;
  PetscCall(PetscMalloc1(ncol, &localtoglobal));
  for (i = 0; i < a->A->cmap->n; i++) localtoglobal[i] = A->cmap->rstart + i + 1;
  for (i = 0; i < a->B->cmap->n; i++) localtoglobal[i + a->A->cmap->n] = a->garray[i] + 1;

  /* generate the work vectors needed for the local solves; their arrays are attached later with VecPlaceArray() */
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->A->rmap->n, NULL, &tfs->b));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->A->cmap->n, NULL, &tfs->xd));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->B->cmap->n, NULL, &tfs->xo));
  tfs->nd = a->A->cmap->n;

  /*  ierr =  MatIsSymmetric(A,tol,&issymmetric); */
  /*  if (issymmetric) { */
  PetscCall(PetscBarrier((PetscObject)pc));
  if (A->symmetric == PETSC_BOOL3_TRUE) {
    tfs->xxt = XXT_new();
    PetscCall(XXT_factor(tfs->xxt, localtoglobal, A->rmap->n, ncol, (PetscErrorCode(*)(void *, PetscScalar *, PetscScalar *))PCTFSLocalMult_TFS, pc));
    pc->ops->apply = PCApply_TFS_XXT;
  } else {
    tfs->xyt = XYT_new();
    PetscCall(XYT_factor(tfs->xyt, localtoglobal, A->rmap->n, ncol, (PetscErrorCode(*)(void *, PetscScalar *, PetscScalar *))PCTFSLocalMult_TFS, pc));
    pc->ops->apply = PCApply_TFS_XYT;
  }

  PetscCall(PetscFree(localtoglobal));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_TFS(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_TFS(PC pc, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
     PCTFS - A parallel direct solver intended for problems with very few unknowns (like the
         coarse grid in multigrid). Performs a Cholesky or LU factorization of a matrix defined by
         its local matrix-vector product.

   Level: beginner

   Notes:
    Only implemented for `MATMPIAIJ` matrices

    Only works on a solver object that lives on all of `PETSC_COMM_WORLD`!

    Only works for real numbers (is not built if `PetscScalar` is complex)

   Implemented by Henry M. Tufo III and Paul Fischer, originally for Nek5000, and known as XXT or XYT

.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`
M*/
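
/*
   Example usage (illustrative sketch): the preconditioner is selected at run time
   with the option -pc_type tfs, or in code with

       PCSetType(pc, PCTFS);

   on a KSP/PC whose communicator is PETSC_COMM_WORLD.
*/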
PETSC_EXTERN PetscErrorCode PCCreate_TFS(PC pc)
{
  PC_TFS     *tfs;
  PetscMPIInt cmp;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_compare(PETSC_COMM_WORLD, PetscObjectComm((PetscObject)pc), &cmp));
  PetscCheck(cmp == MPI_IDENT || cmp == MPI_CONGRUENT, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "TFS only works with PETSC_COMM_WORLD objects");
  PetscCall(PetscNew(&tfs));

  tfs->xxt = NULL;
  tfs->xyt = NULL;
  tfs->b   = NULL;
  tfs->xd  = NULL;
  tfs->xo  = NULL;
  tfs->nd  = 0;

  pc->ops->apply               = NULL;
  pc->ops->applytranspose      = NULL;
  pc->ops->setup               = PCSetUp_TFS;
  pc->ops->destroy             = PCDestroy_TFS;
  pc->ops->setfromoptions      = PCSetFromOptions_TFS;
  pc->ops->view                = PCView_TFS;
  pc->ops->applyrichardson     = NULL;
  pc->ops->applysymmetricleft  = NULL;
  pc->ops->applysymmetricright = NULL;
  pc->data                     = (void *)tfs;
  PetscFunctionReturn(PETSC_SUCCESS);
}