Actual source code: ex11.c


static char help[] = "Solves a linear system in parallel with KSP.\n\n";

/*
   Description: Solves a complex linear system in parallel with KSP.

   The model problem:
      Solve the Helmholtz equation on the unit square (0,1) x (0,1):
          -delta u - sigma1*u + i*sigma2*u = f,
           where delta = Laplace operator
      Dirichlet b.c.'s on all sides
      Use the 2-D, five-point finite difference stencil.
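
      The discrete equations are scaled by h^2, where h = 1/(n+1): each
      off-diagonal stencil entry is -1 and each diagonal entry is
      4 - sigma1*h^2 + i*sigma2*h^2, as assembled in the loop below.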

   Compiling the code:
      This code uses the complex numbers version of PETSc, so PETSc
      must be configured with complex scalars to build it.
*/
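
/*
   A typical way to enable complex scalars is to configure with
       ./configure --with-scalar-type=complex
   The test harness below ("requires: complex") skips this example
   for real-scalar builds.
*/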

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices                 petscis.h  - index sets
     petscviewer.h - viewers                  petscpc.h  - preconditioners
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    dim, i, j, Ii, J, Istart, Iend, n = 6, its, use_random;
  PetscScalar v, none = -1.0, sigma2, pfive = 0.5, *xa;
  PetscRandom rctx;
  PetscReal   h2, sigma1 = 100.0;
  PetscBool   flg = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-sigma1", &sigma1, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  dim = n * n;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, dim, dim));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
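
  /*
     Thanks to MatSetFromOptions(), the format can then be chosen at
     run time, e.g.
         ./ex11 -mat_type aij      (sparse AIJ, the default)
         ./ex11 -mat_type dense
  */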

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
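
  /*
     For instance, with the default n = 6 (dim = 36) on 3 MPI ranks,
     PETSC_DECIDE gives each rank 12 contiguous rows: rank 0 has
     Istart = 0, Iend = 12; rank 1 has Istart = 12, Iend = 24; and so on.
  */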

  /*
     Set matrix elements in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-norandom", &flg, NULL));
  if (flg) use_random = 0;
  else use_random = 1;
  if (use_random) {
    PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
    PetscCall(PetscRandomSetFromOptions(rctx));
    PetscCall(PetscRandomSetInterval(rctx, 0.0, PETSC_i));
  } else {
    sigma2 = 10.0 * PETSC_i;
  }
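  /*
     Note: with the interval (0.0, PETSC_i), the random sigma2 values
     have zero real part and an imaginary part between 0 and 1;
     otherwise sigma2 is fixed at 10i.
  */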
  h2 = 1.0 / ((n + 1) * (n + 1));
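  /*
     Five-point stencil: global row Ii corresponds to grid point (i,j)
     with Ii = i*n + j, so the vertical neighbors are Ii -/+ n and the
     horizontal neighbors are Ii -/+ 1.
  */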
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    if (i < n - 1) {
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    if (j > 0) {
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    if (j < n - 1) {
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    if (use_random) PetscCall(PetscRandomGetValue(rctx, &sigma2));
    v = 4.0 - sigma1 * h2 + sigma2 * h2;
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, ADD_VALUES));
  }
  if (use_random) PetscCall(PetscRandomDestroy(&rctx));

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
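
  /*
     For example, purely local work that does not reference A could be
     placed between the MatAssemblyBegin() and MatAssemblyEnd() calls
     above, overlapping that computation with the communication of the
     off-process entries set earlier.
  */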

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, dim));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));
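
  /*
     VecSetFromOptions() likewise lets the vector type be chosen at
     run time, e.g. -vec_type cuda when PETSc was configured with CUDA
     support.
  */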

  /*
     Set exact solution; then compute right-hand-side vector.
  */

  if (use_random) {
    PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
    PetscCall(PetscRandomSetFromOptions(rctx));
    PetscCall(VecSetRandom(u, rctx));
  } else {
    PetscCall(VecSet(u, pfive));
  }
  PetscCall(MatMult(A, u, b));
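
  /*
     Because b is manufactured as A*u for a known u, the exact solution
     of the linear system is u itself; the error check below relies on
     this.
  */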

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
  */
  PetscCall(KSPSetFromOptions(ksp));
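
  /*
     For example:
         mpiexec -n 4 ./ex11 -ksp_type gmres -pc_type jacobi -ksp_rtol 1e-8 -ksp_monitor
  */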

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(KSPSolve(ksp, b, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
      Print the first 3 entries of x; this demonstrates extraction of the
      real and imaginary components of the complex vector, x.
  */
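  /*
     Note: VecGetArray() exposes only the locally owned entries of x,
     and PetscPrintf() prints from rank 0 only, so this printout shows
     rank 0's first three local entries (rank 0 must own at least 3).
  */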
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-print_x3", &flg, NULL));
  if (flg) {
    PetscCall(VecGetArray(x, &xa));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "The first three entries of x are:\n"));
    for (i = 0; i < 3; i++) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "x[%" PetscInt_FMT "] = %g + %g i\n", i, (double)PetscRealPart(xa[i]), (double)PetscImaginaryPart(xa[i])));
    PetscCall(VecRestoreArray(x, &xa));
  }

  /*
     Check the error: compute x <- x - u (overwriting the computed
     solution), then take norm = ||x - u||_2.
  */
  PetscCall(VecAXPY(x, none, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  if (norm < 1.e-12) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error < 1.e-12 iterations %" PetscInt_FMT "\n", its));
  } else {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));
  if (use_random) PetscCall(PetscRandomDestroy(&rctx));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: complex

   test:
      args: -n 6 -norandom -pc_type none -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   testset:
      suffix: deflation
      args: -norandom -pc_type deflation -ksp_monitor_short
      requires: superlu_dist

      test:
        nsize: 6

      test:
        nsize: 3
        args: -pc_deflation_compute_space {{db2 aggregation}}

      test:
        suffix: pc_deflation_init_only-0
        nsize: 4
        args: -ksp_type fgmres -pc_deflation_compute_space db4 -pc_deflation_compute_space_size 2 -pc_deflation_levels 2 -deflation_ksp_max_it 10
        #TODO remove suffix and next test when this works
        #args: -pc_deflation_init_only {{0 1}separate output}
        args: -pc_deflation_init_only 0

      test:
        suffix: pc_deflation_init_only-1
        nsize: 4
        args: -ksp_type fgmres -pc_deflation_compute_space db4 -pc_deflation_compute_space_size 2 -pc_deflation_levels 2 -deflation_ksp_max_it 10
        args: -pc_deflation_init_only 1

TEST*/