Actual source code: ex65.c

/*
 Partial differential equation

   d   d u = 1, 0 < x < 1,
   --   --
   dx   dx
with boundary conditions

   u = 0 for x = 0, x = 1

   This uses multigrid to solve the linear system.

   Demonstrates how to build a DMSHELL for managing multigrid. The DMSHELL simply wraps a
   1d DMDA and uses it to construct all the needed PETSc objects.

*/
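
/*
   Overview: main() builds a 1d DMDA, wraps it in a DMSHELL, and attaches the shell to the
   KSP with KSPSetDM(). When run with -pc_type mg, PCMG invokes the shell's coarsen and
   interpolation callbacks (which forward to the DMDA) to build the coarse levels and the
   grid transfer operators.
*/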

static char help[] = "Solves 1D constant coefficient Laplacian using DMSHELL and multigrid.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>
#include <petscksp.h>

static PetscErrorCode ComputeMatrix(KSP, Mat, Mat, void *);
static PetscErrorCode ComputeRHS(KSP, Vec, void *);
static PetscErrorCode CreateMatrix(DM, Mat *);
static PetscErrorCode CreateGlobalVector(DM, Vec *);
static PetscErrorCode CreateLocalVector(DM, Vec *);
static PetscErrorCode Refine(DM, MPI_Comm, DM *);
static PetscErrorCode Coarsen(DM, MPI_Comm, DM *);
static PetscErrorCode CreateInterpolation(DM, DM, Mat *, Vec *);
static PetscErrorCode CreateRestriction(DM, DM, Mat *);
static PetscErrorCode Destroy(void *);

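/*
   Creates a DMSHELL whose context is the given DMDA and registers the hooks (matrix and
   vector creation, refine/coarsen, interpolation/restriction, context destruction) that
   KSP and PCMG call when they need these objects; each hook forwards the request to the
   underlying DMDA.
*/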
static PetscErrorCode MyDMShellCreate(MPI_Comm comm, DM da, DM *shell)
{
  PetscFunctionBeginUser;
  PetscCall(DMShellCreate(comm, shell));
  PetscCall(DMShellSetContext(*shell, da));
  PetscCall(DMShellSetCreateMatrix(*shell, CreateMatrix));
  PetscCall(DMShellSetCreateGlobalVector(*shell, CreateGlobalVector));
  PetscCall(DMShellSetCreateLocalVector(*shell, CreateLocalVector));
  PetscCall(DMShellSetRefine(*shell, Refine));
  PetscCall(DMShellSetCoarsen(*shell, Coarsen));
  PetscCall(DMShellSetCreateInterpolation(*shell, CreateInterpolation));
  PetscCall(DMShellSetCreateRestriction(*shell, CreateRestriction));
  PetscCall(DMShellSetDestroyContext(*shell, Destroy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  KSP      ksp;
  DM       da, shell;
  PetscInt levels;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, 129, 1, 1, 0, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(MyDMShellCreate(PETSC_COMM_WORLD, da, &shell));
  /* these two lines are not needed but allow PCMG to automatically know how many multigrid levels the user wants */
  PetscCall(DMGetRefineLevel(da, &levels));
  PetscCall(DMSetRefineLevel(shell, levels));

  PetscCall(KSPSetDM(ksp, shell));
  PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, NULL));
  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, NULL));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp, NULL, NULL));

  PetscCall(KSPDestroy(&ksp));
  PetscCall(DMDestroy(&shell));
  PetscCall(PetscFinalize());
  return 0;
}

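/*
   Destroys the DMDA stored as the shell's context. DMShellSetDestroyContext() arranges for
   this to be called when the shell itself is destroyed, which is why main() never destroys
   the DMDA directly; passing the address of the local copy of ctx is sufficient for DMDestroy().
*/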
static PetscErrorCode Destroy(void *ctx)
{
  PetscFunctionBeginUser;
  PetscCall(DMDestroy((DM *)&ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

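/* Matrix creation is delegated to the DMDA stored in the shell's context */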
static PetscErrorCode CreateMatrix(DM shell, Mat *A)
{
  DM da;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMCreateMatrix(da, A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

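/* Interpolation between two shell levels is the DMDA interpolation between their underlying DMDAs */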
static PetscErrorCode CreateInterpolation(DM dm1, DM dm2, Mat *mat, Vec *vec)
{
  DM da1, da2;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(dm1, &da1));
  PetscCall(DMShellGetContext(dm2, &da2));
  PetscCall(DMCreateInterpolation(da1, da2, mat, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}

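/* Restriction is built explicitly as the transpose of the coarse-to-fine DMDA interpolation */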
static PetscErrorCode CreateRestriction(DM dm1, DM dm2, Mat *mat)
{
  DM  da1, da2;
  Mat tmat;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(dm1, &da1));
  PetscCall(DMShellGetContext(dm2, &da2));
  PetscCall(DMCreateInterpolation(da1, da2, &tmat, NULL));
  PetscCall(MatTranspose(tmat, MAT_INITIAL_MATRIX, mat));
  PetscCall(MatDestroy(&tmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

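/*
   Global and local vectors are created by the underlying DMDA; VecSetDM() then marks the
   shell (rather than the DMDA) as the DM associated with the vector, so callers that query
   the vector's DM get the DMSHELL back.
*/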
static PetscErrorCode CreateGlobalVector(DM shell, Vec *x)
{
  DM da;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMCreateGlobalVector(da, x));
  PetscCall(VecSetDM(*x, shell));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateLocalVector(DM shell, Vec *x)
{
  DM da;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMCreateLocalVector(da, x));
  PetscCall(VecSetDM(*x, shell));
  PetscFunctionReturn(PETSC_SUCCESS);
}

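/*
   Refine/Coarsen refine or coarsen the underlying DMDA and wrap the result in a fresh
   DMSHELL via MyDMShellCreate(), so every level of the multigrid hierarchy is itself a
   DMSHELL with the same callbacks.
*/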
static PetscErrorCode Refine(DM shell, MPI_Comm comm, DM *dmnew)
{
  DM da, dafine;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMRefine(da, comm, &dafine));
  PetscCall(MyDMShellCreate(PetscObjectComm((PetscObject)shell), dafine, dmnew));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode Coarsen(DM shell, MPI_Comm comm, DM *dmnew)
{
  DM da, dacoarse;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMCoarsen(da, comm, &dacoarse));
  PetscCall(MyDMShellCreate(PetscObjectComm((PetscObject)shell), dacoarse, dmnew));
  PetscFunctionReturn(PETSC_SUCCESS);
}

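/*
   Right-hand side: f = 1 scaled by the mesh spacing h (matching the h scaling of the matrix),
   with the two boundary entries set to zero to enforce the Dirichlet conditions u = 0 at
   x = 0 and x = 1.
*/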
static PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
{
  PetscInt    mx, idx[2];
  PetscScalar h, v[2];
  DM          da, shell;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &shell));
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMDAGetInfo(da, 0, &mx, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  h = 1.0 / (mx - 1);
  PetscCall(VecSet(b, h));
  idx[0] = 0;
  idx[1] = mx - 1;
  v[0] = v[1] = 0.0;
  PetscCall(VecSetValues(b, 2, idx, v, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(b));
  PetscCall(VecAssemblyEnd(b));
  PetscFunctionReturn(PETSC_SUCCESS);
}

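/*
   Assembles the standard 3-point finite difference stencil [-1 2 -1]/h on each locally owned
   row (the negative second derivative scaled by h, consistent with the h-scaled right-hand
   side). Boundary rows get only the diagonal entry 2/h, which together with the zeroed
   right-hand side entries imposes u = 0 at the two endpoints.
*/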
static PetscErrorCode ComputeMatrix(KSP ksp, Mat J, Mat jac, void *ctx)
{
  PetscInt    i, mx, xm, xs;
  PetscScalar v[3], h;
  MatStencil  row, col[3];
  DM          da, shell;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &shell));
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMDAGetInfo(da, 0, &mx, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  PetscCall(DMDAGetCorners(da, &xs, 0, 0, &xm, 0, 0));
  h = 1.0 / (mx - 1);

  for (i = xs; i < xs + xm; i++) {
    row.i = i;
    if (i == 0 || i == mx - 1) {
      v[0] = 2.0 / h;
      PetscCall(MatSetValuesStencil(jac, 1, &row, 1, &row, v, INSERT_VALUES));
    } else {
      v[0]     = (-1.0) / h;
      col[0].i = i - 1;
      v[1]     = (2.0) / h;
      col[1].i = row.i;
      v[2]     = (-1.0) / h;
      col[2].i = i + 1;
      PetscCall(MatSetValuesStencil(jac, 1, &row, 3, col, v, INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

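/*
   Example run, a sketch matching the registered test below (adjust the executable name and
   MPI launcher for your build):

     mpiexec -n 4 ./ex65 -ksp_monitor -pc_type mg -da_refine 3
*/
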
/*TEST

   test:
      nsize: 4
      args: -ksp_monitor -pc_type mg -da_refine 3

TEST*/