Actual source code: ex15.c

static char help[] = "Solves a linear system in parallel with KSP.  Also\n\
illustrates setting a user-defined shell preconditioner.\n\
Input parameters include:\n\
  -user_defined_pc : Activate a user-defined preconditioner\n\n";

/*
   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

/* Define context for user-provided preconditioner */
typedef struct {
  Vec diag;
} SampleShellPC;

/* Declare routines for user-provided preconditioner */
extern PetscErrorCode SampleShellPCCreate(SampleShellPC **);
extern PetscErrorCode SampleShellPCSetUp(PC, Mat, Vec);
extern PetscErrorCode SampleShellPCApply(PC, Vec, Vec);
extern PetscErrorCode SampleShellPCDestroy(PC);

/*
   User-defined routines.  Note that each routine below begins with a call
   to the macro PetscFunctionBeginUser.  If used, this macro allows the
   PETSc error handlers to provide a complete traceback of routine names.
   All PETSc library routines use an analogous macro, and users can
   optionally employ PetscFunctionBeginUser in their application codes as
   well.  Users can get a traceback of PETSc errors regardless; the macro
   merely adds the application routine names to that traceback.
*/

int main(int argc, char **args)
{
  Vec            x, b, u; /* approx solution, RHS, exact solution */
  Mat            A;       /* linear system matrix */
  KSP            ksp;     /* linear solver context */
  PC             pc;      /* preconditioner context */
  PetscReal      norm;    /* norm of solution error */
  SampleShellPC *shell;   /* user-defined preconditioner context */
  PetscScalar    v, one = 1.0, none = -1.0;
  PetscInt       i, j, Ii, J, Istart, Iend, m = 8, n = 7, its;
  PetscBool      user_defined_pc = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
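
  /*
     A typical invocation (assuming the executable is built as "ex15", as in
     the PETSc examples) might be:
       mpiexec -n 2 ./ex15 -m 16 -n 16 -user_defined_pc -ksp_monitor
  */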

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime.  Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (i < m - 1) {
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j > 0) {
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j < n - 1) {
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    v = 4.0;
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
  }
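
  /*
     For an interior grid point (row Ii, with i = Ii / n and j = Ii mod n),
     the loop above assembles the standard five-point stencil row:
       A(Ii, Ii-n) = -1,  A(Ii, Ii-1) = -1,  A(Ii, Ii) = 4,
       A(Ii, Ii+1) = -1,  A(Ii, Ii+n) = -1
     Boundary rows simply omit the neighbors that fall outside the grid.
  */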

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
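  /* Computations that do not touch A could be placed here, overlapping with
     the assembly communication started above (none are needed in this example). */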
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes(), and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, m * n));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  PetscCall(VecSet(u, one));
  PetscCall(MatMult(A, u, b));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
               Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));

  /*
     Set linear solver defaults for this problem (optional).
      - By extracting the KSP and PC contexts from the KSP context,
        we can then directly call any KSP and PC routines
        to set various options.
  */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(KSPSetTolerances(ksp, 1.e-7, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
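
  /*
     Other solver settings could be made directly in the same way; for
     example (illustrative only, not needed in this example):
       PetscCall(KSPSetType(ksp, KSPGMRES));
  */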

  /*
     Set a user-defined "shell" preconditioner if desired
  */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-user_defined_pc", &user_defined_pc, NULL));
  if (user_defined_pc) {
    /* (Required) Indicate to PETSc that we're using a "shell" preconditioner */
    PetscCall(PCSetType(pc, PCSHELL));

    /* (Optional) Create a context for the user-defined preconditioner; this
       context can be used to contain any application-specific data. */
    PetscCall(SampleShellPCCreate(&shell));

    /* (Required) Set the user-defined routine for applying the preconditioner */
    PetscCall(PCShellSetApply(pc, SampleShellPCApply));
    PetscCall(PCShellSetContext(pc, shell));

    /* (Optional) Set user-defined function to free objects used by custom preconditioner */
    PetscCall(PCShellSetDestroy(pc, SampleShellPCDestroy));

    /* (Optional) Set a name for the preconditioner, used for PCView() */
    PetscCall(PCShellSetName(pc, "MyPreconditioner"));

    /* (Optional) Do any setup required for the preconditioner */
    /* Note: This function could be set with PCShellSetSetUp() and it would be called when necessary */
    PetscCall(SampleShellPCSetUp(pc, A, x));
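
    /*
       A minimal sketch of such a PCShellSetSetUp() callback (hypothetical,
       not used in this example) could reconstruct the arguments from the
       PC itself:

         PetscErrorCode SampleShellPCSetUpShell(PC pc)
         {
           Mat pmat;
           Vec t;

           PetscFunctionBeginUser;
           PetscCall(PCGetOperators(pc, NULL, &pmat));
           PetscCall(MatCreateVecs(pmat, &t, NULL));
           PetscCall(SampleShellPCSetUp(pc, pmat, t));
           PetscCall(VecDestroy(&t));
           PetscFunctionReturn(PETSC_SUCCESS);
         }
    */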

  } else {
    PetscCall(PCSetType(pc, PCJACOBI));
  }

  /*
     Set runtime options, e.g.,
       -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  PetscCall(KSPSetFromOptions(ksp));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(KSPSolve(ksp, b, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error
  */
  PetscCall(VecAXPY(x, none, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));

  PetscCall(PetscFinalize());
  return 0;
}

/***********************************************************************/
/*          Routines for a user-defined shell preconditioner           */
/***********************************************************************/

/*
   SampleShellPCCreate - This routine creates a user-defined
   preconditioner context.

   Output Parameter:
.  shell - user-defined preconditioner context
*/
PetscErrorCode SampleShellPCCreate(SampleShellPC **shell)
{
  SampleShellPC *newctx;

  PetscFunctionBeginUser;
  PetscCall(PetscNew(&newctx));
  newctx->diag = NULL;
  *shell       = newctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------------------- */
/*
   SampleShellPCSetUp - This routine sets up a user-defined
   preconditioner context.

   Input Parameters:
+  pc - preconditioner object
.  pmat - preconditioner matrix
-  x - vector

   Output Parameter:
.  shell - fully set up user-defined preconditioner context

   Notes:
   In this example, we define the shell preconditioner to be Jacobi's
   method.  Thus, here we create a work vector for storing the reciprocal
   of the diagonal of the preconditioner matrix; this vector is then
   used within the routine SampleShellPCApply().
*/
PetscErrorCode SampleShellPCSetUp(PC pc, Mat pmat, Vec x)
{
  SampleShellPC *shell;
  Vec            diag;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &shell));
  PetscCall(VecDuplicate(x, &diag));
  PetscCall(MatGetDiagonal(pmat, diag));
  PetscCall(VecReciprocal(diag));

  shell->diag = diag;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------------------- */
/*
   SampleShellPCApply - This routine demonstrates the use of a
   user-provided preconditioner.

   Input Parameters:
+  pc - preconditioner object
-  x - input vector

   Output Parameter:
.  y - preconditioned vector

   Notes:
   This code implements the Jacobi preconditioner, merely as an
   example of working with a PCSHELL.  Note that the Jacobi method
   is already provided within PETSc.
*/
PetscErrorCode SampleShellPCApply(PC pc, Vec x, Vec y)
{
  SampleShellPC *shell;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &shell));
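  /* Jacobi preconditioning: y_i = x_i / A_ii, using the reciprocal diagonal
     stored in the shell context by SampleShellPCSetUp() */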
  PetscCall(VecPointwiseMult(y, x, shell->diag));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------------------- */
/*
   SampleShellPCDestroy - This routine destroys a user-defined
   preconditioner context.

   Input Parameter:
.  pc - preconditioner object containing the user-defined context to free
*/
PetscErrorCode SampleShellPCDestroy(PC pc)
{
  SampleShellPC *shell;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &shell));
  PetscCall(VecDestroy(&shell->diag));
  PetscCall(PetscFree(shell));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
      requires: !complex !single

   test:
      nsize: 2
      args: -ksp_view -user_defined_pc -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: tsirm
      args: -m 60 -n 60 -ksp_type tsirm -pc_type ksp -ksp_monitor_short -ksp_ksp_type fgmres -ksp_ksp_rtol 1e-10 -ksp_pc_type mg -ksp_ksp_max_it 30
      timeoutfactor: 4

TEST*/