Actual source code: ex1.c

static char help[] = "Solves a tridiagonal linear system with KSP.\n\n";

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines   petscvec.h - vectors
     petscmat.h    - matrices              petscpc.h  - preconditioners
     petscis.h     - index sets
     petscviewer.h - viewers

  Note: The corresponding parallel example is ex23.c
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PC          pc;      /* preconditioner context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i, n = 10, col[3], its;
  PetscMPIInt size;
  PetscScalar value[3];

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This is a uniprocessor example only!");

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
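
  /*
     The option above lets the problem size be set at runtime; for example
     (assuming the executable is named ex1), "./ex1 -n 100" solves a
     100x100 system instead of the default 10x10 one.
  */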

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create vectors.  Note that we form one vector from scratch and
     then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_SELF, &x));
  PetscCall(PetscObjectSetName((PetscObject)x, "Solution"));
  PetscCall(VecSetSizes(x, PETSC_DECIDE, n));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecDuplicate(x, &b));
  PetscCall(VecDuplicate(x, &u));

  /*
     Create matrix.  When using MatCreate(), the matrix format can
     be specified at runtime.

     Performance tuning note: For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance. See the matrix chapter of the users manual for details.
  */
  PetscCall(MatCreate(PETSC_COMM_SELF, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
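
  /*
     A minimal preallocation sketch (not executed here): this tridiagonal
     matrix has at most 3 nonzeros per row, so for the default sequential
     AIJ format one could replace the MatSetUp() call above with

        PetscCall(MatSeqAIJSetPreallocation(A, 3, NULL));

     avoiding repeated memory allocations during MatSetValues().
  */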

  /*
     Assemble the tridiagonal matrix: each interior row i has the stencil
     (-1, 2, -1) in columns (i-1, i, i+1); the first and last rows have
     only two entries.
  */
  value[0] = -1.0;
  value[1] = 2.0;
  value[2] = -1.0;
  for (i = 1; i < n - 1; i++) {
    col[0] = i - 1;
    col[1] = i;
    col[2] = i + 1;
    PetscCall(MatSetValues(A, 1, &i, 3, col, value, INSERT_VALUES));
  }
  i      = n - 1;
  col[0] = n - 2;
  col[1] = n - 1;
  PetscCall(MatSetValues(A, 1, &i, 2, col, value, INSERT_VALUES));
  i        = 0;
  col[0]   = 0;
  col[1]   = 1;
  value[0] = 2.0;
  value[1] = -1.0;
  PetscCall(MatSetValues(A, 1, &i, 2, col, value, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
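
  /*
     A debugging sketch: to inspect the assembled matrix one could add

        PetscCall(MatView(A, PETSC_VIEWER_STDOUT_SELF));

     here, or simply pass -mat_view on the command line.
  */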

  /*
     Set an exact solution (a vector of all ones); then compute the
     right-hand-side vector b = A*u, so that the error of the computed
     solution can be measured against u.
  */
  PetscCall(VecSet(u, 1.0));
  PetscCall(MatMult(A, u, b));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the matrix that defines the preconditioner.
  */
  PetscCall(KSPSetOperators(ksp, A, A));
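
  /*
     A hedged variant: if a separate (e.g., cheaper) matrix P were used to
     build the preconditioner, one would call

        PetscCall(KSPSetOperators(ksp, A, P));

     instead, where P is a hypothetical matrix assembled by the
     application; this example uses A for both roles.
  */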

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following three statements are optional; all of these
       parameters could alternatively be specified at runtime via
       KSPSetFromOptions();
  */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCJACOBI));
  PetscCall(KSPSetTolerances(ksp, 1.e-5, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
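
  /*
     The KSPSetTolerances() arguments are, in order, the relative tolerance,
     absolute tolerance, divergence tolerance, and maximum iterations;
     PETSC_DEFAULT keeps the built-in value for each. A sketch of choosing
     the Krylov method directly (GMRES is the default and is kept here):

        PetscCall(KSPSetType(ksp, KSPCG));
  */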

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    KSPSetFromOptions() is called _after_ any other customization
    routines.
  */
  PetscCall(KSPSetFromOptions(ksp));
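
  /*
     For example (assuming the executable is named ex1), the run

        ./ex1 -ksp_type cg -pc_type sor -ksp_monitor_short

     replaces the Jacobi-preconditioned GMRES configured above with
     SOR-preconditioned CG and prints the residual norm at each iteration.
  */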

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPSolve(ksp, b, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check the solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecAXPY(x, -1.0, u)); /* x <- x - u, the error vector */
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "Norm of error %g, Iterations %" PetscInt_FMT "\n", (double)norm, its));

  /*
     Check that KSP automatically picks up modified matrix values: MatShift()
     adds 2.0 to the diagonal of A, and the next KSPSolve() must use the
     updated operator (and refresh the preconditioner) without further calls.
  */
  PetscCall(MatShift(A, 2.0));
  PetscCall(KSPSolve(ksp, b, x));
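
  /*
     A sketch of how one could confirm that this second solve converged
     (not part of the original checks):

        KSPConvergedReason reason;
        PetscCall(KSPGetConvergedReason(ksp, &reason));
        PetscCheck(reason > 0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "KSP did not converge");
  */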

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));

  /* Test that options prefixes propagate properly to PCMPI objects (only
     exercised when the MPI linear solver server is active) */
  if (PCMPIServerActive) {
    PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
    PetscCall(KSPSetOptionsPrefix(ksp, "prefix_test_"));
    PetscCall(MatSetOptionsPrefix(A, "prefix_test_"));
    PetscCall(KSPSetOperators(ksp, A, A));
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(KSPSolve(ksp, b, x));
    PetscCall(KSPDestroy(&ksp));
  }

  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 2
      args: -pc_type sor -pc_sor_symmetric -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 2_aijcusparse
      requires: cuda
      args: -pc_type sor -pc_sor_symmetric -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda
      args: -ksp_view

   test:
      suffix: 3
      args: -pc_type eisenstat -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 3_aijcusparse
      requires: cuda
      args: -pc_type eisenstat -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda -ksp_view

   test:
      suffix: aijcusparse
      requires: cuda
      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda -ksp_view
      output_file: output/ex1_1_aijcusparse.out

   test:
      requires: defined(PETSC_USE_SINGLE_LIBRARY)
      suffix: mpi_linear_solver_server_1
      nsize: 3
      filter: sed 's?ATOL?RTOL?g' | grep -v HERMITIAN
      # use the MPI Linear Solver Server
      args: -mpi_linear_solver_server -mpi_linear_solver_server_view
      # controls for the use of PCMPI on a particular system
      args: -mpi_linear_solver_server_minimum_count_per_rank 5 -mpi_linear_solver_server_ksp_view -mpi_linear_solver_server_mat_view
      # the usual options for the linear solver (in this case using the server)
      args: -ksp_monitor -ksp_converged_reason -mat_view -ksp_view -ksp_type cg -pc_type none
      # the options for the prefixed objects
      args: -prefix_test_mpi_linear_solver_server_mat_view -prefix_test_ksp_monitor -prefix_test_mpi_linear_solver_server_minimum_count_per_rank 5

   test:
      requires: !__float128
      suffix: minit
      args: -ksp_monitor -pc_type none -ksp_min_it 8

TEST*/