Actual source code: ex8.c


static char help[] = "Illustrates use of the preconditioner ASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:  Activate user-defined subdomains\n\n";

/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting for experimenting with variants
   of the ASM preconditioner.
*/

/*T
   Concepts: KSP^Additive Schwarz Method (ASM) with user-defined subdomains
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets              petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                 petscpc.h  - preconditioners
*/
#include <petscksp.h>

int main(int argc,char **args)
{
  Vec            x,b,u;                  /* approx solution, RHS, exact solution */
  Mat            A;                      /* linear system matrix */
  KSP            ksp;                    /* linear solver context */
  PC             pc;                     /* PC context */
  IS             *is,*is_local;          /* array of index sets that define the subdomains */
  PetscInt       overlap = 1;            /* width of subdomain overlap */
  PetscInt       Nsub;                   /* number of subdomains */
  PetscInt       m = 15,n = 17;          /* mesh dimensions in x- and y- directions */
  PetscInt       M = 2,N = 1;            /* number of subdomains in x- and y- directions */
  PetscInt       i,j,Ii,J,Istart,Iend;
  PetscErrorCode ierr;                   /* needed for the checked return of PetscInitialize() */
  PetscMPIInt    size;
  PetscBool      flg;
  PetscBool      user_subdomains = PETSC_FALSE;
  PetscScalar    v, one = 1.0;
  PetscReal      e;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Mdomains",&M,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Ndomains",&N,NULL);
  PetscOptionsGetInt(NULL,NULL,"-overlap",&overlap,NULL);
  PetscOptionsGetBool(NULL,NULL,"-user_set_subdomains",&user_subdomains,NULL);

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five-point stencil, YET AGAIN
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;   /* row Ii corresponds to grid point (i,j) */
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}  /* (i-1,j) neighbor */
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}  /* (i+1,j) neighbor */
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}  /* (i,j-1) neighbor */
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}  /* (i,j+1) neighbor */
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);                 /* diagonal entry */
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create and set vectors
  */
  MatCreateVecs(A,&u,&b);
  VecDuplicate(u,&x);
  VecSet(u,one);        /* exact solution: all ones */
  MatMult(A,u,b);       /* right-hand side b = A*u  */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set the default preconditioner for this program to be ASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCASM);

  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCASMSetOverlap(pc,overlap);
     Could instead use the option -pc_asm_overlap <ovl>

     Set the total number of blocks via -pc_asm_blocks <blks>
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
  */
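  /*
     A minimal sketch (not exercised by this example) of fixing the
     block count programmatically instead of with -pc_asm_blocks; the
     count of 4 is an arbitrary illustration, and passing NULL index
     sets asks PETSc to determine the subdomains itself:

         PCASMSetTotalSubdomains(pc,4,NULL,NULL);
  */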

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     First, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry; a sketch of
     what such a routine might produce follows below.

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */
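  /*
     As an illustration (an assumption, not part of this example), two
     non-overlapping halves of the global numbering could be built with
     ISCreateStride() and handed to the preconditioner:

         IS  half[2];
         ISCreateStride(PETSC_COMM_SELF,(m*n)/2,0,1,&half[0]);
         ISCreateStride(PETSC_COMM_SELF,m*n-(m*n)/2,(m*n)/2,1,&half[1]);
         PCASMSetTotalSubdomains(pc,2,half,NULL);
  */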

  if (!user_subdomains) { /* basic version */
    PCASMSetOverlap(pc,overlap);
  } else { /* advanced version */
    if (size != 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"PCASMCreateSubdomains2D() is currently a uniprocessor routine only!");
    PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is,&is_local);
    PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
    flg  = PETSC_FALSE;
    PetscOptionsGetBool(NULL,NULL,"-subdomain_view",&flg,NULL);
    if (flg) {
      PetscPrintf(PETSC_COMM_SELF,"Mesh points: %D x %D; subdomain partition: %D x %D; overlap: %D; Nsub: %D\n",m,n,M,N,overlap,Nsub);
      PetscPrintf(PETSC_COMM_SELF,"IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  IS[%D]\n",i);
        ISView(is[i],PETSC_VIEWER_STDOUT_SELF);
      }
      PetscPrintf(PETSC_COMM_SELF,"IS_local:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  IS_local[%D]\n",i);
        ISView(is_local[i],PETSC_VIEWER_STDOUT_SELF);
      }
    }
  }

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub_ before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */
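  /*
     For instance, a plausible invocation of the basic method (an
     illustration, not one of the recorded tests) would be

         mpiexec -n 2 ./ex8 -sub_pc_type ilu -sub_ksp_rtol 1.e-4
  */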

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-user_set_subdomain_solvers",&flg,NULL);
  if (flg) {
    KSP       *subksp;        /* array of KSP contexts for local subblocks */
    PetscInt  nlocal,first;   /* number of local subblocks, first local subblock */
    PC        subpc;          /* PC context for subblock */
    PetscBool isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if the PC type was changed from the runtime options
     */
    PetscObjectTypeCompare((PetscObject)pc,PCASM,&isasm);
    if (!isasm) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Cannot change the PC type when manually setting the subdomain solvers");

    /*
       Call KSPSetUp() to set up the ASM data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCASMGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
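    /*
       The loop above has the same effect as supplying
           -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-7
       uniformly at runtime, but it could equally set different options
       on each block by branching on the block index i.
    */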
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
                      Compare result to the exact solution
     ------------------------------------------------------------------- */
  VecAXPY(x,-1.0,u);              /* x <- x - u, the error vector */
  VecNorm(x,NORM_INFINITY,&e);    /* e = max-norm of the error    */

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-print_error",&flg,NULL);
  if (flg) {
    PetscPrintf(PETSC_COMM_WORLD,"Infinity norm of the error: %g\n",(double)e);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */

  if (user_subdomains) {
    for (i=0; i<Nsub; i++) {
      ISDestroy(&is[i]);
      ISDestroy(&is_local[i]);
    }
    PetscFree(is);
    PetscFree(is_local);
  }
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      suffix: 1
      args: -print_error

TEST*/