Actual source code: ex1.c
static char help[] = "Solves a tridiagonal linear system with KSP.\n\n";

/*
  Include "petscksp.h" so that we can use KSP solvers. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscpc.h     - preconditioners
     petscis.h     - index sets
     petscviewer.h - viewers

  Note: The corresponding parallel example is ex23.c
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PC          pc;      /* preconditioner context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i, n = 10, col[3], its;
  PetscMPIInt size;
  PetscScalar value[3];

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This is a uniprocessor example only!");

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create vectors. Note that we form one vector from scratch and
     then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_SELF, &x));
  PetscCall(PetscObjectSetName((PetscObject)x, "Solution"));
  PetscCall(VecSetSizes(x, PETSC_DECIDE, n));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecDuplicate(x, &b));
  PetscCall(VecDuplicate(x, &u));

  /*
     Create matrix. When using MatCreate(), the matrix format can
     be specified at runtime.

     Performance tuning note: For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance. See the matrix chapter of the users manual for details.
  */
  PetscCall(MatCreate(PETSC_COMM_SELF, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
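
  /*
     For example, since each row of this tridiagonal matrix has at most three
     nonzeros, the default sequential AIJ format could be preallocated with
     MatSeqAIJSetPreallocation(A, 3, NULL); this example simply relies on
     MatSetUp() instead.
  */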

  /*
     Assemble matrix
  */
  value[0] = -1.0;
  value[1] = 2.0;
  value[2] = -1.0;
  for (i = 1; i < n - 1; i++) {
    col[0] = i - 1;
    col[1] = i;
    col[2] = i + 1;
    PetscCall(MatSetValues(A, 1, &i, 3, col, value, INSERT_VALUES));
  }
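
  /* Last row (i = n-1): only the sub-diagonal and diagonal entries are set */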
  if (n > 1) {
    i      = n - 1;
    col[0] = n - 2;
    col[1] = n - 1;
    PetscCall(MatSetValues(A, 1, &i, 2, col, value, INSERT_VALUES));
  }
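
  /* First row (i = 0): reload value[] so the stencil starts at the diagonal;
     set the diagonal and, when n > 1, the super-diagonal entry */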
  i        = 0;
  col[0]   = 0;
  col[1]   = 1;
  value[0] = 2.0;
  value[1] = -1.0;
  PetscCall(MatSetValues(A, 1, &i, n > 1 ? 2 : 1, col, value, INSERT_VALUES));
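
  /* Assembly must be completed with MatAssemblyBegin()/MatAssemblyEnd()
     before A can be used below in MatMult() and KSPSolve() */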
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  PetscCall(VecSet(u, 1.0));
  PetscCall(MatMult(A, u, b));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the matrix that defines the preconditioner.
  */
  PetscCall(KSPSetOperators(ksp, A, A));

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following statements are optional; all of these
       parameters could alternatively be specified at runtime via
       KSPSetFromOptions();
  */
  if (!PCMPIServerActive) { /* cannot directly set KSP/PC options when using the MPI linear solver server */
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCJACOBI));
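    /* rtol = 1.e-5; PETSC_CURRENT keeps the absolute tolerance, divergence
       tolerance, and maximum iteration count at their current values */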
    PetscCall(KSPSetTolerances(ksp, 1.e-5, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
  }

  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  PetscCall(KSPSetFromOptions(ksp));
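
  /*
     For example, one might run this example as (illustrative command line):
         ./ex1 -n 100 -ksp_type gmres -ksp_rtol 1.e-8 -ksp_monitor -pc_type sor
  */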

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPSolve(ksp, b, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check the solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "Norm of error %g, Iterations %" PetscInt_FMT "\n", (double)norm, its));

  /*
     Check that KSP automatically picks up the new matrix values introduced by
     MatShift() (which adds 2.0 to the diagonal) and uses them in the next solve.
  */
  PetscCall(MatShift(A, 2.0));
  PetscCall(KSPSolve(ksp, b, x));

  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));

  /* test if prefixes properly propagate to PCMPI objects */
  if (PCMPIServerActive) {
    PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
    PetscCall(KSPSetOptionsPrefix(ksp, "prefix_test_"));
    PetscCall(MatSetOptionsPrefix(A, "prefix_test_"));
    PetscCall(KSPSetOperators(ksp, A, A));
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(KSPSolve(ksp, b, x));
    PetscCall(KSPDestroy(&ksp));
  }

  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: library_preload
      requires: defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -library_preload

   test:
      suffix: 2
      args: -pc_type sor -pc_sor_symmetric -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 2_aijcusparse
      requires: cuda
      args: -pc_type sor -pc_sor_symmetric -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda
      args: -ksp_view

   test:
      suffix: 3
      args: -pc_type eisenstat -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 3_aijcusparse
      requires: cuda
      args: -pc_type eisenstat -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda -ksp_view

   test:
      suffix: aijcusparse
      requires: cuda
      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda -ksp_view
      output_file: output/ex1_1_aijcusparse.out

   test:
      requires: defined(PETSC_USE_SINGLE_LIBRARY)
      suffix: mpi_linear_solver_server_1
      nsize: 3
      filter: sed 's?ATOL?RTOL?g' | grep -v HERMITIAN | grep -v "shared memory"
      # use the MPI Linear Solver Server
      args: -mpi_linear_solver_server -mpi_linear_solver_server_view
      # controls for the use of PCMPI on a particular system
      args: -mpi_linear_solver_server_minimum_count_per_rank 5 -mpi_linear_solver_server_ksp_view -mpi_linear_solver_server_mat_view
      # the usual options for the linear solver (in this case using the server)
      args: -ksp_monitor -ksp_converged_reason -mat_view -ksp_view -ksp_type cg -pc_type none
      # the options for the prefixed objects
      args: -prefix_test_mpi_linear_solver_server_mat_view -prefix_test_ksp_monitor -prefix_test_mpi_linear_solver_server_minimum_count_per_rank 5

   test:
      requires: defined(PETSC_USE_SINGLE_LIBRARY)
      suffix: mpi_linear_solver_server_1_shared_memory_false
      nsize: 3
      filter: sed 's?ATOL?RTOL?g' | grep -v HERMITIAN
      # use the MPI Linear Solver Server
      args: -mpi_linear_solver_server -mpi_linear_solver_server_view -mpi_linear_solver_server_use_shared_memory false
      # controls for the use of PCMPI on a particular system
      args: -mpi_linear_solver_server_minimum_count_per_rank 5 -mpi_linear_solver_server_ksp_view -mpi_linear_solver_server_mat_view
      # the usual options for the linear solver (in this case using the server)
      args: -ksp_monitor -ksp_converged_reason -mat_view -ksp_view -ksp_type cg -pc_type none
      # the options for the prefixed objects
      args: -prefix_test_mpi_linear_solver_server_mat_view -prefix_test_ksp_monitor -prefix_test_mpi_linear_solver_server_minimum_count_per_rank 5

   test:
      requires: !__float128
      suffix: minit
      args: -ksp_monitor -pc_type none -ksp_min_it 8

TEST*/