Actual source code: ex12.c


static char help[] = "Solves a linear system in parallel with KSP,\n\
demonstrating how to register a new preconditioner (PC) type.\n\
Input parameters include:\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";

/*
   Demonstrates registering a new preconditioner (PC) type.

   Use PCRegister() to register a new PC type, whether its code is
   linked into the executable or resides in a dynamic library.

   Also provide the prototype for your PCCreate_XXX() function. In
   this example we use PETSc's own implementation of the Jacobi
   method, PCCreate_Jacobi(), just as an example.

   See the file src/ksp/pc/impls/jacobi/jacobi.c for details on how to
   write a new PC component.

   See the manual page PCRegister() for details on how to register a method.
*/
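
/*
   For orientation, here is a minimal sketch of the shape such a creation
   routine takes, modeled on jacobi.c. The names PCApply_MyPC and
   PCCreate_MyPC are hypothetical, and the apply body is elided:

     static PetscErrorCode PCApply_MyPC(PC pc, Vec x, Vec y)
     {
       PetscFunctionBegin;
       ... compute y = M^{-1} x ...
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     PETSC_EXTERN PetscErrorCode PCCreate_MyPC(PC pc)
     {
       PetscFunctionBegin;
       pc->ops->apply = PCApply_MyPC;  (plus any other ops the method needs)
       PetscFunctionReturn(PETSC_SUCCESS);
     }

   Once registered with PCRegister("mypc", PCCreate_MyPC), the method can
   be selected with PCSetType(pc, "mypc") or the option -pc_type mypc.
*/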

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets               petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                  petscpc.h  - preconditioners
*/
#include <petscksp.h>

PETSC_EXTERN PetscErrorCode PCCreate_Jacobi(PC);

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i, j, Ii, J, Istart, Iend, m = 8, n = 7, its;
  PetscScalar v, one = 1.0;
  PC          pc; /* preconditioner context */

  PetscInitialize(&argc, &args, (char *)0, help);
  PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL);
  PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix can be
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n);
  MatSetFromOptions(A);
  MatSetUp(A);

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A, &Istart, &Iend);
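
  /*
     As an illustrative example (not computed by this code): with the
     default m = 8, n = 7 there are 56 global rows, so on two processes
     PETSC_DECIDE typically gives rank 0 rows [0,28) and rank 1 rows
     [28,56).
  */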

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
   */
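
  /*
     The global row index Ii corresponds to grid point (i, j) via
     Ii = i * n + j, so the stencil neighbors live at rows Ii - n and
     Ii + n (x-direction) and Ii - 1 and Ii + 1 (y-direction). As a
     worked example with the defaults m = 8, n = 7: row Ii = 10 is grid
     point (i, j) = (1, 3), with off-diagonal entries in columns
     3, 17, 9, and 11.
  */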
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    if (i < m - 1) {
      J = Ii + n;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    if (j > 0) {
      J = Ii - 1;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    if (j < n - 1) {
      J = Ii + 1;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    v = 4.0;
    MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES);
  }

  /*
     Assemble matrix, using the two-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes(), and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - Note: We form one vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD, &u);
  VecSetSizes(u, PETSC_DECIDE, m * n);
  VecSetFromOptions(u);
  VecDuplicate(u, &b);
  VecDuplicate(b, &x);

  /*
     Set the exact solution; then compute the right-hand-side vector.
     The exact solution is chosen to be the vector with all elements
     equal to 1.0.
  */
  VecSet(u, one);
  MatMult(A, u, b);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD, &ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp, A, A);

  /*
     First register a new PC type with the command PCRegister()
  */
  PCRegister("ourjacobi", PCCreate_Jacobi);

  /*
     Set the PC type to be the new method
  */
  KSPGetPC(ksp, &pc);
  PCSetType(pc, "ourjacobi");

  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  KSPSetFromOptions(ksp);
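
  /*
     For instance (a hypothetical invocation, assuming the executable is
     named ex12), the newly registered type can also be selected from
     the command line:

       mpiexec -n 2 ./ex12 -pc_type ourjacobi -ksp_monitor
  */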

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSolve(ksp, b, x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error
  */
  VecAXPY(x, -1.0, u);
  VecNorm(x, NORM_2, &norm);
  KSPGetIterationNumber(ksp, &its);

  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
  */
  PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      args: -ksp_gmres_cgs_refinement_type refine_always

TEST*/
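
/*
   A typical build-and-run session for this example (a sketch only; the
   exact make target and paths depend on your PETSc installation):

     make ex12
     mpiexec -n 2 ./ex12 -m 16 -n 16 -ksp_monitor

   On success the program prints the error norm and iteration count.
*/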