Actual source code: ex16.c

/* Usage: mpiexec ex16 [-help] [all PETSc options] */

static char help[] = "Solves a sequence of linear systems with different right-hand-side vectors.\n\
Input parameters include:\n\
  -ntimes <ntimes> : number of linear systems to solve\n\
  -view_exact_sol  : write exact solution vector to stdout\n\
  -m <mesh_x>      : number of mesh points in x-direction\n\
  -n <mesh_y>      : number of mesh points in y-direction\n\n";
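
/*
   Example invocation (mirrors the test harness entry at the bottom of this
   file):

     mpiexec -n 2 ./ex16 -ntimes 4 -ksp_gmres_cgs_refinement_type refine_always
*/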

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    ntimes, i, j, k, Ii, J, Istart, Iend;
  PetscInt    m = 8, n = 7, its;
  PetscBool   flg = PETSC_FALSE;
  PetscScalar v, one = 1.0, rhs;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix for use in solving a series of
         linear systems of the form, A x_i = b_i, for i=1,2,...
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
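
  /*
     Because MatSetFromOptions() is called above, the matrix format can be
     selected on the command line at runtime, e.g. (option value shown only
     as an illustration):

       mpiexec -n 2 ./ex16 -mat_type mpiaij
  */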

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
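
  /*
     A minimal sketch (not part of the original example) of how each rank's
     ownership range could be inspected, using the synchronized print
     routines so output appears in rank order:

       PetscMPIInt rank;
       PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
       PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d] owns rows %" PetscInt_FMT " to %" PetscInt_FMT "\n", rank, Istart, Iend));
       PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
  */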

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (i < m - 1) {
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j > 0) {
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j < n - 1) {
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    v = 4.0;
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
  }
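
  /*
     Worked example (added for clarity): with the default m = 8, n = 7,
     global row Ii = 10 corresponds to grid point i = 10/7 = 1, j = 10 - 7 = 3,
     so the -1 entries land in columns 3 (Ii - n), 17 (Ii + n), 9 (Ii - 1),
     and 11 (Ii + 1), with 4 in the diagonal column 10.
  */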

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
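
  /*
     For instance (a sketch only), independent local work that does not
     touch A could be placed between the two calls above:

       PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
       // ... purely local computation, overlapping the communication ...
       PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  */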

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, m * n));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));
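
  /*
     An alternative sketch: vectors whose layout is guaranteed to be
     compatible with A could instead be obtained directly from the matrix:

       PetscCall(MatCreateVecs(A, &x, &b));
  */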

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));
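
  /*
     If a separate preconditioning matrix P were available (P is
     hypothetical here), it would be passed as the third argument instead:

       PetscCall(KSPSetOperators(ksp, A, P));
  */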

  /*
     Set runtime options, e.g.,
       -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  PetscCall(KSPSetFromOptions(ksp));
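
  /*
     A sketch of programmatic customization (tolerance value illustrative
     only); such a call belongs before KSPSetFromOptions() so that the
     command line can still override it:

       PetscCall(KSPSetTolerances(ksp, 1.e-7, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
  */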

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Solve several linear systems of the form A x_i = b_i
       I.e., we retain the same matrix (A) for all systems, but
       change the right-hand-side vector (b_i) at each step.

       In this case, we simply call KSPSolve() multiple times.  The
       preconditioner setup operations (e.g., factorization for ILU)
       will be done during the first call to KSPSolve() only; such
       operations will NOT be repeated for successive solves.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ntimes = 2;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ntimes", &ntimes, NULL));
  for (k = 1; k < ntimes + 1; k++) {
    /*
       Set exact solution; then compute right-hand-side vector.  We use
       an exact solution of a vector with all elements equal to 1.0*k.
    */
    rhs = one * (PetscReal)k;
    PetscCall(VecSet(u, rhs));
    PetscCall(MatMult(A, u, b));
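
    /*
       Sanity check (follows from the stencil above): every interior row of
       A sums to 4 - 4 = 0, so interior entries of b are 0, while edge and
       corner rows sum to 1 and 2, giving entries k and 2k in b.
    */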

    /*
       View the exact solution vector if desired
    */
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-view_exact_sol", &flg, NULL));
    if (flg) PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(KSPSolve(ksp, b, x));

    /*
       Check the error
    */
    PetscCall(VecAXPY(x, -1.0, u));
    PetscCall(VecNorm(x, NORM_2, &norm));
    PetscCall(KSPGetIterationNumber(ksp, &its));
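
    /*
       One could additionally verify convergence, e.g. (a sketch, not in
       the original example):

         KSPConvergedReason reason;
         PetscCall(KSPGetConvergedReason(ksp, &reason));
         PetscCheck(reason > 0, PETSC_COMM_WORLD, PETSC_ERR_NOT_CONVERGED, "Solve %" PetscInt_FMT " did not converge", k);
    */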

    /*
       Print convergence information.  PetscPrintf() produces a single
       print statement from all processes that share a communicator.
    */
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g System %" PetscInt_FMT ": iterations %" PetscInt_FMT "\n", (double)norm, k, its));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                            Clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 2
      args: -ntimes 4 -ksp_gmres_cgs_refinement_type refine_always

TEST*/