Actual source code: ex12.c

static char help[] = "Solves a linear system in parallel with KSP,\n\
demonstrating how to register a new preconditioner (PC) type.\n\
Input parameters include:\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";

/*
   Demonstrates registering a new preconditioner (PC) type.

   Use PCRegister() to register a new PC type, whether its code is
   linked directly into the executable or resides in a dynamic library.

   Also provide the prototype for your PCCreate_XXX() function. As an
   illustration, this example registers PETSc's own implementation of
   the Jacobi method, PCCreate_Jacobi().

   See the file src/ksp/pc/impls/jacobi/jacobi.c for details on how to
   write a new PC component.

   See the manual page for PCRegister() for details on how to register a method.
*/
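
/*
   A minimal sketch of the registration pattern (PCCreate_MyPC and the
   name "mypc" are hypothetical; any creation routine written in the
   style of PCCreate_Jacobi() would do):

      PETSC_EXTERN PetscErrorCode PCCreate_MyPC(PC);

      PetscCall(PCRegister("mypc", PCCreate_MyPC));
      PetscCall(PCSetType(pc, "mypc"));    // or select at runtime: -pc_type mypc
*/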

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices                 petscis.h  - index sets
     petscksp.h    - Krylov subspace methods  petscpc.h  - preconditioners
     petscviewer.h - viewers
*/
#include <petscksp.h>
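
/*
   PCCreate_Jacobi() is the creation routine for PETSc's built-in Jacobi
   preconditioner.  It is not declared in the public headers, so we supply
   the prototype here; PETSC_EXTERN provides the proper linkage.
*/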

PETSC_EXTERN PetscErrorCode PCCreate_Jacobi(PC);

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i, j, Ii, J, Istart, Iend, m = 8, n = 7, its;
  PetscScalar v, one = 1.0;
  PC          pc; /* preconditioner context */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
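
  /*
     Allow the mesh dimensions to be overridden at runtime, e.g., -m 16 -n 16.
  */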
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix can be
     determined by PETSc at runtime.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
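
  /*
     For example, with the default 8 x 7 mesh (56 rows) on two processes,
     rank 0 typically owns rows 0..27 and rank 1 rows 28..55; the exact
     split is chosen by PETSc at runtime.
  */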

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
   */
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (i < m - 1) {
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j > 0) {
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j < n - 1) {
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    v = 4.0;
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
  }
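
  /*
     Row Ii corresponds to grid point (i,j) = (Ii/n, Ii mod n).  Each
     assembled row has 4.0 on the diagonal and -1.0 for every existing
     neighbor, i.e. the standard 5-point finite-difference Laplacian.
  */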

  /*
     Assemble the matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, m * n));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));

  /*
     Set the exact solution; then compute the right-hand-side vector.
     Here the exact solution is a vector with all elements equal to 1.0.
  */
  PetscCall(VecSet(u, one));
  PetscCall(MatMult(A, u, b));
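
  /*
     Since u is the vector of all ones, b = A*u holds the row sums of A:
     zero at interior grid points (4 minus four -1 neighbor entries) and
     positive at boundary points, where some neighbor entries are absent.
  */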

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));

  /*
     First register the new PC type with PCRegister()
  */
  PetscCall(PCRegister("ourjacobi", PCCreate_Jacobi));

  /*
     Set the PC type to the newly registered method
  */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, "ourjacobi"));
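
  /*
     Once registered, the new type can also be selected at runtime with
     -pc_type ourjacobi, provided KSPSetFromOptions() is called below.
  */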

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    KSPSetFromOptions() is called _after_ any other customization
    routines.
  */
  PetscCall(KSPSetFromOptions(ksp));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(KSPSolve(ksp, b, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error
  */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));

  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
  */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
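
/*
   Typical invocation (the exact launcher syntax depends on your MPI
   installation):

      mpiexec -n 2 ./ex12 -m 16 -n 16 -ksp_monitor
*/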

/*TEST

   test:
      args: -ksp_gmres_cgs_refinement_type refine_always

TEST*/