Actual source code: ex8.c
static char help[] = "Illustrates use of the preconditioner ASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:         Activate user-defined subdomains\n\n";

/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the ASM preconditioner.
*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u;        /* approx solution, RHS, exact solution */
  Mat         A;              /* linear system matrix */
  KSP         ksp;            /* linear solver context */
  PC          pc;             /* PC context */
  IS         *is, *is_local;  /* array of index sets that define the subdomains */
  PetscInt    overlap = 1;    /* width of subdomain overlap */
  PetscInt    Nsub;           /* number of subdomains */
  PetscInt    m = 15, n = 17; /* mesh dimensions in x- and y- directions */
  PetscInt    M = 2, N = 1;   /* number of subdomains in x- and y- directions */
  PetscInt    i, j, Ii, J, Istart, Iend;
  PetscMPIInt size;
  PetscBool   flg;
  PetscBool   user_subdomains = PETSC_FALSE;
  PetscScalar v, one = 1.0;
  PetscReal   e;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Mdomains", &M, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Ndomains", &N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-overlap", &overlap, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-user_set_subdomains", &user_subdomains, NULL));

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five-point stencil, yet again
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (i < m - 1) {
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j > 0) {
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j < n - 1) {
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    v = 4.0;
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Create and set vectors
  */
  PetscCall(MatCreateVecs(A, &u, &b));
  PetscCall(VecDuplicate(u, &x));
  PetscCall(VecSet(u, one));
  PetscCall(MatMult(A, u, b));

  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));

  /*
     Set the default preconditioner for this program to be ASM
  */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCASM));

  /* -------------------------------------------------------------------
                   Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCASMSetOverlap(pc, overlap);
     Could instead use the option -pc_asm_overlap <ovl>

     Set the total number of blocks via -pc_asm_blocks <blks>
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
     (Illustrative command lines are sketched in the comment below.)
  */
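
  /*
     A minimal usage sketch of the options mentioned above; the executable
     name "ex8" and the particular option values are illustrative only and
     are not part of this example's test suite:

         ./ex8 -pc_asm_blocks 4 -pc_asm_overlap 2

     would run on one process with four ASM blocks and an overlap of 2,
     exercising the "multiple blocks on a single processor" case noted above.
  */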

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Firstly, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry (a hand-rolled
     sketch is given in the comment below).

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */
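
  /*
     A minimal sketch (not used by this example) of building subdomain index
     sets by hand instead of calling PCASMCreateSubdomains2D().  Here each of
     two local blocks is simply a contiguous range of matrix rows created
     with ISCreateStride(); the 50/50 split is illustrative only, ignores the
     grid geometry, and leaves is_local NULL so the ASM defaults apply:

         IS my_is[2];
         PetscCall(ISCreateStride(PETSC_COMM_SELF, m * n / 2, 0, 1, &my_is[0]));
         PetscCall(ISCreateStride(PETSC_COMM_SELF, m * n - m * n / 2, m * n / 2, 1, &my_is[1]));
         PetscCall(PCASMSetLocalSubdomains(pc, 2, my_is, NULL));
  */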

  if (!user_subdomains) { /* basic version */
    PetscCall(PCASMSetOverlap(pc, overlap));
  } else { /* advanced version */
    PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_SUP, "PCASMCreateSubdomains2D() is currently a uniprocessor routine only!");
    PetscCall(PCASMCreateSubdomains2D(m, n, M, N, 1, overlap, &Nsub, &is, &is_local));
    PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-subdomain_view", &flg, NULL));
    if (flg) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mesh points: %" PetscInt_FMT " x %" PetscInt_FMT "; subdomain partition: %" PetscInt_FMT " x %" PetscInt_FMT "; overlap: %" PetscInt_FMT "; Nsub: %" PetscInt_FMT "\n", m, n, M, N, overlap, Nsub));
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "IS:\n"));
      for (i = 0; i < Nsub; i++) {
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "  IS[%" PetscInt_FMT "]\n", i));
        PetscCall(ISView(is[i], PETSC_VIEWER_STDOUT_SELF));
      }
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "IS_local:\n"));
      for (i = 0; i < Nsub; i++) {
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "  IS_local[%" PetscInt_FMT "]\n", i));
        PetscCall(ISView(is_local[i], PETSC_VIEWER_STDOUT_SELF));
      }
    }
  }

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
         -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
     (an illustrative command line is sketched in the comment below).

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks of the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */
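
  /*
     A usage sketch of the "same solver on every block" route described
     above, set entirely from the command line; the launcher, executable
     name, and option values here are illustrative only:

         mpiexec -n 2 ./ex8 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4

     When -user_set_subdomain_solvers is given, the code below instead
     configures each block's KSP programmatically.
  */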

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-user_set_subdomain_solvers", &flg, NULL));
  if (flg) {
    KSP      *subksp;        /* array of KSP contexts for local subblocks */
    PetscInt  nlocal, first; /* number of local subblocks, first local subblock */
    PC        subpc;         /* PC context for subblock */
    PetscBool isasm;

    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "User explicitly sets subdomain solvers.\n"));

    /*
       Set runtime options
    */
    PetscCall(KSPSetFromOptions(ksp));

    /*
       Flag an error if the PC type was changed by the runtime options
    */
    PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &isasm));
    PetscCheck(isasm, PETSC_COMM_WORLD, PETSC_ERR_SUP, "Cannot change the PC type when manually changing the subdomain solver settings");

    /*
       Call KSPSetUp() to set up the ASM data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCASMGetSubKSP().
    */
    PetscCall(KSPSetUp(ksp));

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PetscCall(PCASMGetSubKSP(pc, &nlocal, &first, &subksp));

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i = 0; i < nlocal; i++) {
      PetscCall(KSPGetPC(subksp[i], &subpc));
      PetscCall(PCSetType(subpc, PCILU));
      PetscCall(KSPSetType(subksp[i], KSPGMRES));
      PetscCall(KSPSetTolerances(subksp[i], 1.e-7, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
    }
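
    /*
       A sketch (not executed here) of giving individual blocks different
       solvers, which the loop above does not do: for instance, switching
       the first local block to Jacobi while the others keep ILU.  The
       choice of block index and of solver is illustrative only:

           if (nlocal > 0) {
             PetscCall(KSPGetPC(subksp[0], &subpc));
             PetscCall(PCSetType(subpc, PCJACOBI));
           }
    */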
  } else {
    /*
       Set runtime options
    */
    PetscCall(KSPSetFromOptions(ksp));
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */
  PetscCall(KSPSolve(ksp, b, x));

  /* -------------------------------------------------------------------
                 Compare result to the exact solution
     ------------------------------------------------------------------- */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_INFINITY, &e));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-print_error", &flg, NULL));
  if (flg) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Infinity norm of the error: %g\n", (double)e));

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  if (user_subdomains) {
    for (i = 0; i < Nsub; i++) {
      PetscCall(ISDestroy(&is[i]));
      PetscCall(ISDestroy(&is_local[i]));
    }
    PetscCall(PetscFree(is));
    PetscCall(PetscFree(is_local));
  }
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      suffix: 1
      args: -print_error

TEST*/