Actual source code: ex8.c


static char help[] = "Illustrates use of the preconditioner ASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:  Activate user-defined subdomains\n\n";

/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.
   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the ASM preconditioner.
*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines       petscvec.h    - vectors
     petscmat.h    - matrices                  petscis.h     - index sets
     petscksp.h    - Krylov subspace methods   petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

int main(int argc,char **args)
{
  Vec            x,b,u;                   /* approx solution, RHS, exact solution */
  Mat            A;                       /* linear system matrix */
  KSP            ksp;                     /* linear solver context */
  PC             pc;                      /* PC context */
  IS             *is,*is_local;           /* array of index sets that define the subdomains */
  PetscInt       overlap = 1;             /* width of subdomain overlap */
  PetscInt       Nsub;                    /* number of subdomains */
  PetscInt       m = 15,n = 17;           /* mesh dimensions in x- and y- directions */
  PetscInt       M = 2,N = 1;             /* number of subdomains in x- and y- directions */
  PetscInt       i,j,Ii,J,Istart,Iend;
  PetscMPIInt    size;
  PetscBool      flg;
  PetscBool      user_subdomains = PETSC_FALSE;
  PetscScalar    v, one = 1.0;
  PetscReal      e;

  PetscInitialize(&argc,&args,(char*)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Mdomains",&M,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Ndomains",&N,NULL);
  PetscOptionsGetInt(NULL,NULL,"-overlap",&overlap,NULL);
  PetscOptionsGetBool(NULL,NULL,"-user_set_subdomains",&user_subdomains,NULL);
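
  /*
     A sample invocation exercising the options registered above (illustrative
     only; user-defined subdomains rely on PCASMCreateSubdomains2D(), which is
     uniprocessor only, so run it on a single process):
         ./ex8 -m 15 -n 17 -Mdomains 2 -Ndomains 1 -overlap 1 -user_set_subdomains
  */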

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five-point stencil, YET AGAIN
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create and set vectors
  */
  MatCreateVecs(A,&u,&b);
  VecDuplicate(u,&x);
  VecSet(u,one);
  MatMult(A,u,b);

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set the default preconditioner for this program to be ASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCASM);

  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCASMSetOverlap(pc,overlap);
     Could instead use the option -pc_asm_overlap <ovl>

     Set the total number of blocks via -pc_asm_blocks <blks>
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
  */
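
  /*
     For example, a sample single-process run exercising these options
     (illustrative only) might be
         ./ex8 -pc_asm_blocks 4 -pc_asm_overlap 2 -ksp_monitor
     which uses 4 blocks with an overlap of 2 between them.
  */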

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Firstly, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry.

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */
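
  /*
     A minimal sketch of the PCASMSetTotalSubdomains() variant (the executable
     path below uses PCASMSetLocalSubdomains()); with this call the index sets
     describe the subdomains for the entire problem rather than only the
     locally owned ones:

         PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is,&is_local);
         PCASMSetTotalSubdomains(pc,Nsub,is,is_local);
  */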

  if (!user_subdomains) { /* basic version */
    PCASMSetOverlap(pc,overlap);
  } else { /* advanced version */
    PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is,&is_local);
    PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
    flg  = PETSC_FALSE;
    PetscOptionsGetBool(NULL,NULL,"-subdomain_view",&flg,NULL);
    if (flg) {
      PetscPrintf(PETSC_COMM_SELF,"Mesh points: %D x %D; subdomain partition: %D x %D; overlap: %D; Nsub: %D\n",m,n,M,N,overlap,Nsub);
      PetscPrintf(PETSC_COMM_SELF,"IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  IS[%D]\n",i);
        ISView(is[i],PETSC_VIEWER_STDOUT_SELF);
      }
      PetscPrintf(PETSC_COMM_SELF,"IS_local:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  IS_local[%D]\n",i);
        ISView(is_local[i],PETSC_VIEWER_STDOUT_SELF);
      }
    }
  }

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */
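
  /*
     For example, a sample command line that applies the same ILU-preconditioned
     GMRES solver on every block (illustrative only):
         mpiexec -n 2 ./ex8 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
  */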

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-user_set_subdomain_solvers",&flg,NULL);
  if (flg) {
    KSP       *subksp;        /* array of KSP contexts for local subblocks */
    PetscInt  nlocal,first;   /* number of local subblocks, first local subblock */
    PC        subpc;          /* PC context for subblock */
    PetscBool isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if PCTYPE is changed from the runtime options
    */
    PetscObjectTypeCompare((PetscObject)pc,PCASM,&isasm);
    if (!isasm) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Cannot change the PCTYPE when manually setting the subdomain solvers");

    /*
       Call KSPSetUp() to set up the ASM data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCASMGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
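
    /*
       To give one particular block a different solver, a test on the block
       index could be added inside the loop above; a minimal sketch in the
       spirit of ex7.c, e.g. a direct solve on the first local block:
           if (i == 0) {
             PCSetType(subpc,PCLU);
             KSPSetType(subksp[i],KSPPREONLY);
           }
    */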
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
                      Compare result to the exact solution
     ------------------------------------------------------------------- */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_INFINITY,&e);

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-print_error",&flg,NULL);
  if (flg) {
    PetscPrintf(PETSC_COMM_WORLD,"Infinity norm of the error: %g\n",(double)e);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */

  if (user_subdomains) {
    for (i=0; i<Nsub; i++) {
      ISDestroy(&is[i]);
      ISDestroy(&is_local[i]);
    }
    PetscFree(is);
    PetscFree(is_local);
  }
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      suffix: 1
      args: -print_error

TEST*/