/* cp.c: "column projection" (CP) preconditioner implementation */
1: #include <petsc/private/pcimpl.h>
2: #include <../src/mat/impls/aij/seq/aij.h>
4: /*
5: Private context (data structure) for the CP preconditioner.
6: */
typedef struct {
  PetscInt     n, m;  /* local number of columns and rows of the (square, SeqAIJ) pmat */
  Vec          work;  /* scratch vector; holds the running residual during PCApply_CP() */
  PetscScalar *d;     /* 1/(sum of squares of each column), computed in PCSetUp_CP() */
  PetscScalar *a;     /* non-zeros by column */
  PetscInt    *i, *j; /* offsets of nonzeros by column, non-zero indices by column */
} PC_CP;
15: static PetscErrorCode PCSetUp_CP(PC pc)
16: {
17: PC_CP *cp = (PC_CP *)pc->data;
18: PetscInt i, j, *colcnt;
19: PetscBool flg;
20: Mat_SeqAIJ *aij = (Mat_SeqAIJ *)pc->pmat->data;
22: PetscFunctionBegin;
23: PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATSEQAIJ, &flg));
24: PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Currently only handles SeqAIJ matrices");
26: PetscCall(MatGetLocalSize(pc->pmat, &cp->m, &cp->n));
27: PetscCheck(cp->m == cp->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Currently only for square matrices");
29: if (!cp->work) PetscCall(MatCreateVecs(pc->pmat, &cp->work, NULL));
30: if (!cp->d) PetscCall(PetscMalloc1(cp->n, &cp->d));
31: if (cp->a && pc->flag != SAME_NONZERO_PATTERN) {
32: PetscCall(PetscFree3(cp->a, cp->i, cp->j));
33: cp->a = NULL;
34: }
36: /* convert to column format */
37: if (!cp->a) PetscCall(PetscMalloc3(aij->nz, &cp->a, cp->n + 1, &cp->i, aij->nz, &cp->j));
38: PetscCall(PetscCalloc1(cp->n, &colcnt));
40: for (i = 0; i < aij->nz; i++) colcnt[aij->j[i]]++;
41: cp->i[0] = 0;
42: for (i = 0; i < cp->n; i++) cp->i[i + 1] = cp->i[i] + colcnt[i];
43: PetscCall(PetscArrayzero(colcnt, cp->n));
44: for (i = 0; i < cp->m; i++) { /* over rows */
45: for (j = aij->i[i]; j < aij->i[i + 1]; j++) { /* over columns in row */
46: cp->j[cp->i[aij->j[j]] + colcnt[aij->j[j]]] = i;
47: cp->a[cp->i[aij->j[j]] + colcnt[aij->j[j]]++] = aij->a[j];
48: }
49: }
50: PetscCall(PetscFree(colcnt));
52: /* compute sum of squares of each column d[] */
53: for (i = 0; i < cp->n; i++) { /* over columns */
54: cp->d[i] = 0.;
55: for (j = cp->i[i]; j < cp->i[i + 1]; j++) cp->d[i] += cp->a[j] * cp->a[j]; /* over rows in column */
56: cp->d[i] = 1.0 / cp->d[i];
57: }
58: PetscFunctionReturn(PETSC_SUCCESS);
59: }
61: static PetscErrorCode PCApply_CP(PC pc, Vec bb, Vec xx)
62: {
63: PC_CP *cp = (PC_CP *)pc->data;
64: PetscScalar *b, *x, xt;
66: PetscFunctionBegin;
67: PetscCall(VecCopy(bb, cp->work));
68: PetscCall(VecGetArray(cp->work, &b));
69: PetscCall(VecGetArray(xx, &x));
71: for (PetscInt i = 0; i < cp->n; i++) { /* over columns */
72: xt = 0.;
73: for (PetscInt j = cp->i[i]; j < cp->i[i + 1]; j++) xt += cp->a[j] * b[cp->j[j]]; /* over rows in column */
74: xt *= cp->d[i];
75: x[i] = xt;
76: for (PetscInt j = cp->i[i]; j < cp->i[i + 1]; j++) b[cp->j[j]] -= xt * cp->a[j]; /* over rows in column updating b*/
77: }
78: for (PetscInt i = cp->n - 1; i > -1; i--) { /* over columns */
79: xt = 0.;
80: for (PetscInt j = cp->i[i]; j < cp->i[i + 1]; j++) xt += cp->a[j] * b[cp->j[j]]; /* over rows in column */
81: xt *= cp->d[i];
82: x[i] = xt;
83: for (PetscInt j = cp->i[i]; j < cp->i[i + 1]; j++) b[cp->j[j]] -= xt * cp->a[j]; /* over rows in column updating b*/
84: }
86: PetscCall(VecRestoreArray(cp->work, &b));
87: PetscCall(VecRestoreArray(xx, &x));
88: PetscFunctionReturn(PETSC_SUCCESS);
89: }
91: static PetscErrorCode PCReset_CP(PC pc)
92: {
93: PC_CP *cp = (PC_CP *)pc->data;
95: PetscFunctionBegin;
96: PetscCall(PetscFree(cp->d));
97: PetscCall(VecDestroy(&cp->work));
98: PetscCall(PetscFree3(cp->a, cp->i, cp->j));
99: PetscFunctionReturn(PETSC_SUCCESS);
100: }
102: static PetscErrorCode PCDestroy_CP(PC pc)
103: {
104: PC_CP *cp = (PC_CP *)pc->data;
106: PetscFunctionBegin;
107: PetscCall(PCReset_CP(pc));
108: PetscCall(PetscFree(cp->d));
109: PetscCall(PetscFree3(cp->a, cp->i, cp->j));
110: PetscCall(PetscFree(pc->data));
111: PetscFunctionReturn(PETSC_SUCCESS);
112: }
114: /*MC
115: PCCP - a "column-projection" preconditioner. Iteratively projects the current residual onto the one dimensional spaces
116: spanned by each of the columns of the matrix.
118: This is a terrible preconditioner and is not recommended, ever!
120: Loops over the entries of x computing dx_i (e_i is the unit vector in the ith direction) to
121: .vb
      min || b - A(x + dx_i e_i) ||_2
124: dx_i
126: That is, it changes a single entry of x to minimize the new residual norm.
127: Let A_i represent the ith column of A, then the minimization can be written as
129: min || r - (dx_i) A e_i ||_2
130: dx_i
131: or min || r - (dx_i) A_i ||_2
132: dx_i
134: take the derivative with respect to dx_i to obtain
135: dx_i = (A_i^T A_i)^(-1) A_i^T r
137: This is equivalent to using Gauss-Seidel on the normal equations
138: .ve
140: Notes:
141: This procedure can also be done with block columns or any groups of columns
142: but this is not coded.
144: These "projections" can be done simultaneously for all columns (similar to the Jacobi method)
145: or sequentially (similar to Gauss-Seidel/SOR). This is only coded for SOR type.
147: This is related to, but not the same as "row projection" methods.
149: This is currently coded only for `MATSEQAIJ` matrices
151: Level: intermediate
153: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PCJACOBI`, `PCSOR`
154: M*/
156: PETSC_EXTERN PetscErrorCode PCCreate_CP(PC pc)
157: {
158: PC_CP *cp;
160: PetscFunctionBegin;
161: PetscCall(PetscNew(&cp));
162: pc->data = (void *)cp;
164: pc->ops->apply = PCApply_CP;
165: pc->ops->applytranspose = PCApply_CP;
166: pc->ops->setup = PCSetUp_CP;
167: pc->ops->reset = PCReset_CP;
168: pc->ops->destroy = PCDestroy_CP;
169: pc->ops->view = NULL;
170: pc->ops->applyrichardson = NULL;
171: PetscFunctionReturn(PETSC_SUCCESS);
172: }