Actual source code: ex11.c


static char help[] = "Solves a linear system in parallel with KSP.\n\n";

/*
   Description: Solves a complex linear system in parallel with KSP.

   The model problem:
      Solve the Helmholtz equation on the unit square (0,1) x (0,1):
          -delta u - sigma1*u + i*sigma2*u = f,
           where delta = the Laplace operator
      Dirichlet boundary conditions on all sides
      Use the 2-D, five-point finite difference stencil.

   Compiling the code:
      This code uses the complex numbers version of PETSc, so configure
      must be run to enable this.
*/
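
/*
   A minimal sketch of such a configure invocation (any additional flags
   are installation-specific and omitted here):
       ./configure --with-scalar-type=complex
*/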

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines       petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets                petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                   petscpc.h  - preconditioners
*/
#include <petscksp.h>

int main(int argc,char **args)
{
  Vec            x,b,u;       /* approx solution, RHS, exact solution */
  Mat            A;           /* linear system matrix */
  KSP            ksp;         /* linear solver context */
  PetscReal      norm;        /* norm of solution error */
  PetscInt       dim,i,j,Ii,J,Istart,Iend,n = 6,its,use_random;
  PetscScalar    v,none = -1.0,sigma2,pfive = 0.5,*xa;
  PetscRandom    rctx;
  PetscReal      h2,sigma1 = 100.0;
  PetscBool      flg = PETSC_FALSE;

  PetscInitialize(&argc,&args,(char*)0,help);
  PetscOptionsGetReal(NULL,NULL,"-sigma1",&sigma1,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  dim  = n*n;
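
  /*
     Example invocation (illustrative; the MPI launcher name varies by
     installation):
         mpiexec -n 2 ./ex11 -n 10 -sigma1 50.0
  */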

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,dim,dim);
  MatSetFromOptions(A);
  MatSetUp(A);

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);

  /*
     Set matrix elements in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
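
  /*
     Indexing used below: global row Ii corresponds to grid point (i,j)
     via Ii = i*n + j, so the four stencil neighbors of row Ii are the
     columns Ii-n, Ii+n, Ii-1, and Ii+1.
  */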

  PetscOptionsGetBool(NULL,NULL,"-norandom",&flg,NULL);
  if (flg) use_random = 0;
  else use_random = 1;
  if (use_random) {
    PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
    PetscRandomSetFromOptions(rctx);
    PetscRandomSetInterval(rctx,0.0,PETSC_i);
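    /* With the complex interval (0.0, PETSC_i), the generated values have
       zero real part and imaginary part in [0,1], so the random sigma2
       used below is purely imaginary. */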
  } else {
    sigma2 = 10.0*PETSC_i;
  }
  h2 = 1.0/((n+1)*(n+1));
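  /* h2 is the square of the mesh spacing h = 1/(n+1) on the unit square */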
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0) {
      J = Ii-n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);
    }
    if (i<n-1) {
      J = Ii+n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);
    }
    if (j>0) {
      J = Ii-1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);
    }
    if (j<n-1) {
      J = Ii+1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);
    }
    if (use_random) PetscRandomGetValue(rctx,&sigma2);
    v    = 4.0 - sigma1*h2 + sigma2*h2;
    MatSetValues(A,1,&Ii,1,&Ii,&v,ADD_VALUES);
  }
  if (use_random) PetscRandomDestroy(&rctx);

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
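  /* (local computations that do not touch A could be overlapped with the
      assembly communication here) */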
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,dim);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set exact solution; then compute right-hand-side vector.
  */

  if (use_random) {
    PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
    PetscRandomSetFromOptions(rctx);
    VecSetRandom(u,rctx);
  } else {
    VecSet(u,pfive);
  }
  MatMult(A,u,b);
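  /* Since b = A*u by construction, u is the exact solution of Ax = b,
     which allows the error check at the end of the program. */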

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
  */
  KSPSetFromOptions(ksp);
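
  /*
     For example, running with
         -ksp_type gmres -pc_type jacobi -ksp_monitor
     selects GMRES with Jacobi preconditioning and prints the residual
     history (an illustrative choice; any registered KSP/PC types work).
  */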

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSolve(ksp,b,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
      Print the first 3 entries of x; this demonstrates extraction of the
      real and imaginary components of the complex vector, x.
  */
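  /*
     Note: VecGetArray() gives access only to the locally owned entries of
     x, and PetscPrintf() prints from the first process, so this option is
     intended for runs where process 0 owns at least three entries.
  */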
  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-print_x3",&flg,NULL);
  if (flg) {
    VecGetArray(x,&xa);
    PetscPrintf(PETSC_COMM_WORLD,"The first three entries of x are:\n");
    for (i=0; i<3; i++) {
      PetscPrintf(PETSC_COMM_WORLD,"x[%D] = %g + %g i\n",i,(double)PetscRealPart(xa[i]),(double)PetscImaginaryPart(xa[i]));
    }
    VecRestoreArray(x,&xa);
  }

  /*
     Check the error
  */
  VecAXPY(x,none,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  if (norm < 1.e-12) {
    PetscPrintf(PETSC_COMM_WORLD,"Norm of error < 1.e-12 iterations %D\n",its);
  } else {
    PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  if (use_random) PetscRandomDestroy(&rctx);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}

/*TEST

   build:
      requires: complex

   test:
      args: -n 6 -norandom -pc_type none -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   testset:
      suffix: deflation
      args: -norandom -pc_type deflation -ksp_monitor_short
      requires: superlu_dist

      test:
        nsize: 6

      test:
        nsize: 3
        args: -pc_deflation_compute_space {{db2 aggregation}}

      test:
        suffix: pc_deflation_init_only-0
        nsize: 4
        args: -ksp_type fgmres -pc_deflation_compute_space db4 -pc_deflation_compute_space_size 2 -pc_deflation_levels 2 -deflation_ksp_max_it 10
        #TODO remove suffix and next test when this works
        #args: -pc_deflation_init_only {{0 1}separate output}
        args: -pc_deflation_init_only 0

      test:
        suffix: pc_deflation_init_only-1
        nsize: 4
        args: -ksp_type fgmres -pc_deflation_compute_space db4 -pc_deflation_compute_space_size 2 -pc_deflation_levels 2 -deflation_ksp_max_it 10
        args: -pc_deflation_init_only 1

TEST*/