Actual source code: ex18.c

static const char help[] = "Solves a (permuted) linear system in parallel with KSP.\n\
Input parameters include:\n\
  -permute <natural,rcm,nd,...> : solve system in permuted indexing\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices                 petscis.h  - index sets
     petscksp.h    - Krylov subspace methods  petscpc.h  - preconditioners
     petscviewer.h - viewers
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec           x, b, u; /* approx solution, RHS, exact solution */
  Mat           A;       /* linear system matrix */
  KSP           ksp;     /* linear solver context */
  PetscRandom   rctx;    /* random number generator context */
  PetscReal     norm;    /* norm of solution error */
  PetscInt      i, j, Ii, J, Istart, Iend, m, n, its;
  PetscBool     random_exact_sol, view_exact_sol, permute;
  char          ordering[256] = MATORDERINGRCM;
  IS            rowperm = NULL, colperm = NULL;
  PetscScalar   v;
  PetscLogStage stage;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "Poisson example options", "");
  {
    m = 8;
    PetscCall(PetscOptionsInt("-m", "Number of grid points in x direction", "", m, &m, NULL));
    n = m - 1;
    PetscCall(PetscOptionsInt("-n", "Number of grid points in y direction", "", n, &n, NULL));
    random_exact_sol = PETSC_FALSE;
    PetscCall(PetscOptionsBool("-random_exact_sol", "Choose a random exact solution", "", random_exact_sol, &random_exact_sol, NULL));
    view_exact_sol = PETSC_FALSE;
    PetscCall(PetscOptionsBool("-view_exact_sol", "View exact solution", "", view_exact_sol, &view_exact_sol, NULL));
    permute = PETSC_FALSE;
    PetscCall(PetscOptionsFList("-permute", "Permute matrix and vector to solve in a new ordering", "", MatOrderingList, ordering, ordering, sizeof(ordering), &permute));
  }
  PetscOptionsEnd();
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.

     Performance tuning note:  For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance. See the matrix chapter of the users manual for details.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatMPIAIJSetPreallocation(A, 5, NULL, 5, NULL));
  PetscCall(MatSeqAIJSetPreallocation(A, 5, NULL));
  PetscCall(MatSetUp(A));
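
  /*
     Note on the preallocation above: the 5-point stencil used below couples
     each grid point to at most four neighbors plus itself, so every row of A
     has at most 5 nonzeros.  Passing 5 for both the diagonal-block and
     off-diagonal-block counts of the MPIAIJ format (and 5 per row for the
     sequential AIJ format) is thus a safe upper bound that avoids costly
     reallocation during assembly.
  */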

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.

     Note: this uses the less common natural ordering that orders first
     all the unknowns for x = h, then for x = 2h, etc.; hence you see
     J = Ii +- n instead of J = Ii +- m as you might expect. The more
     standard ordering would first do all variables for y = h, then y = 2h, etc.
  */
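
  /*
     Concretely, row Ii corresponds to grid point (i, j) with i = Ii / n and
     j = Ii - i * n, i.e. Ii = i * n + j.  For example, with m = 3 and n = 2
     the unknowns are numbered

        (0,0)->0  (0,1)->1  (1,0)->2  (1,1)->3  (2,0)->4  (2,1)->5

     so the x-direction neighbors of row Ii are Ii - n and Ii + n, and the
     y-direction neighbors are Ii - 1 and Ii + 1, exactly as in the loop below.
  */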
  PetscCall(PetscLogStageRegister("Assembly", &stage));
  PetscCall(PetscLogStagePush(stage));
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (i < m - 1) {
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j > 0) {
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j < n - 1) {
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    v = 4.0;
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
  }

  /*
     Assemble the matrix, using the two-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscLogStagePop());

  /* A is symmetric. Set symmetric flag to enable ICC/Cholesky preconditioner */
  PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));

  /*
     Create parallel vectors.
      - We form one vector from scratch and then duplicate it as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions()
        in this example, we specify only the vector's global dimension;
        the parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local vector and matrix
        dimensions when more sophisticated partitioning is needed
        (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
        below); see the sketch after the vector creation below.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, m * n));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));
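
  /*
     A minimal sketch (not executed here) of the alternative mentioned above:
     an application that controls the parallel layout itself could compute an
     explicit local size and pass it instead of PETSC_DECIDE.  The variable
     nlocal below is hypothetical; the local sizes must sum to m*n across all
     processes and match the local row size given to the matrix, e.g.

       PetscInt nlocal = ...;                       (rows owned by this process)
       PetscCall(VecSetSizes(u, nlocal, m * n));
       PetscCall(MatSetSizes(A, nlocal, nlocal, m * n, m * n));
  */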

  /*
     Set the exact solution; then compute the right-hand-side vector.
     By default we use an exact solution vector with all elements equal
     to 1.0.  Alternatively, the runtime option -random_exact_sol forms
     a solution vector with random components.
  */
  if (random_exact_sol) {
    PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
    PetscCall(PetscRandomSetFromOptions(rctx));
    PetscCall(VecSetRandom(u, rctx));
    PetscCall(PetscRandomDestroy(&rctx));
  } else {
    PetscCall(VecSet(u, 1.0));
  }
  PetscCall(MatMult(A, u, b));
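
  /*
     Because b is computed as A*u, the vector u is by construction the exact
     solution of the discrete system Ax = b, so the error norm reported at the
     end measures only the accuracy of the iterative solve, not the
     discretization error.
  */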

  /*
     View the exact solution vector if desired
  */
  if (view_exact_sol) PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));

  if (permute) {
    Mat Aperm;
    PetscCall(MatGetOrdering(A, ordering, &rowperm, &colperm));
    PetscCall(MatPermute(A, rowperm, colperm, &Aperm));
    PetscCall(VecPermute(b, rowperm, PETSC_FALSE));
    PetscCall(MatDestroy(&A));
    A = Aperm; /* Replace original operator with permuted version */
  }
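
  /*
     After this block the system actually solved is the permuted one: the rows
     and columns of A have been reordered by MatPermute() and b has been moved
     into the same row ordering by VecPermute(), so the computed x is obtained
     in the permuted ordering as well.  It is mapped back (by applying the
     inverse of colperm) after KSPSolve() below, before comparing against u.
  */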

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the PC context from the KSP context (via KSPGetPC()),
       we could directly call any KSP and PC routines to set various options.
     - The following statement is optional; all of these parameters could
       alternatively be specified at runtime via KSPSetFromOptions().
       All of these defaults can be overridden at runtime, as indicated below.
  */
  PetscCall(KSPSetTolerances(ksp, 1.e-2 / ((m + 1) * (n + 1)), PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
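
  /*
     KSPSetTolerances() takes (rtol, abstol, dtol, maxits).  Here only the
     relative tolerance is set, and it shrinks as the grid is refined; the
     remaining parameters are left at their defaults via PETSC_DEFAULT.
  */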

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    KSPSetFromOptions() is called _after_ any other customization
    routines.
  */
  PetscCall(KSPSetFromOptions(ksp));
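
  /*
     For example, a hypothetical invocation (any installed KSP/PC types may be
     substituted) might be

       mpiexec -n 4 ./ex18 -m 39 -n 18 -ksp_type cg -pc_type bjacobi -ksp_monitor_short -permute rcm

     which runs on 4 MPI processes with conjugate gradients, block Jacobi
     preconditioning, short residual monitoring, and an RCM reordering.
  */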

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(KSPSolve(ksp, b, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (permute) PetscCall(VecPermute(x, colperm, PETSC_TRUE));

  /*
     Check the error
  */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));

  /*
     Print convergence information.  PetscPrintf() prints a single message
     from the first process of the communicator, regardless of how many
     processes share it.  An alternative is PetscFPrintf(), which prints
     to a file.
  */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 3
      args: -m 39 -n 18 -ksp_monitor_short -permute nd
      requires: !single

   test:
      suffix: 2
      nsize: 3
      args: -m 39 -n 18 -ksp_monitor_short -permute rcm
      requires: !single

   test:
      suffix: 3
      nsize: 3
      args: -m 13 -n 17 -ksp_monitor_short -ksp_type cg -ksp_cg_single_reduction
      requires: !single

   test:
      suffix: bas
      args: -m 13 -n 17 -ksp_monitor_short -ksp_type cg -pc_type icc -pc_factor_mat_solver_type bas -ksp_view -pc_factor_levels 1
      filter: grep -v "variant "
      requires: !single

TEST*/