Actual source code: ex8.c


static char help[] = "Illustrates use of the ASM (Additive Schwarz Method) preconditioner\n\
for solving a linear system in parallel with KSP.  The code indicates the\n\
procedure for setting user-defined subdomains.  Input parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:  Activate user-defined subdomains\n\n";

/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the ASM preconditioner.
*/
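
/*
   For illustration, some possible ways to run this example (the executable
   name ex8 and the mpiexec launcher are assumptions based on the usual PETSc
   example build setup, not anything specified in this file):

      mpiexec -n 2 ./ex8 -overlap 2 -sub_pc_type lu
      ./ex8 -user_set_subdomains -Mdomains 3 -Ndomains 2 -overlap 1 -subdomain_view
      ./ex8 -user_set_subdomain_solvers

   As noted above, -user_set_subdomains relies on PCASMCreateSubdomains2D()
   and therefore should be run on a single process.
*/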

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u;        /* approx solution, RHS, exact solution */
  Mat         A;              /* linear system matrix */
  KSP         ksp;            /* linear solver context */
  PC          pc;             /* PC context */
  IS         *is, *is_local;  /* array of index sets that define the subdomains */
  PetscInt    overlap = 1;    /* width of subdomain overlap */
  PetscInt    Nsub;           /* number of subdomains */
  PetscInt    m = 15, n = 17; /* mesh dimensions in x- and y-directions */
  PetscInt    M = 2, N = 1;   /* number of subdomains in x- and y-directions */
  PetscInt    i, j, Ii, J, Istart, Iend;
  PetscMPIInt size;
  PetscBool   flg;
  PetscBool   user_subdomains = PETSC_FALSE;
  PetscScalar v, one = 1.0;
  PetscReal   e;              /* infinity norm of the solution error */

  PetscInitialize(&argc, &args, (char *)0, help);
  MPI_Comm_size(PETSC_COMM_WORLD, &size);
  PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL);
  PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL);
  PetscOptionsGetInt(NULL, NULL, "-Mdomains", &M, NULL);
  PetscOptionsGetInt(NULL, NULL, "-Ndomains", &N, NULL);
  PetscOptionsGetInt(NULL, NULL, "-overlap", &overlap, NULL);
  PetscOptionsGetBool(NULL, NULL, "-user_set_subdomains", &user_subdomains, NULL);

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five-point stencil, YET AGAIN
  */
  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n);
  MatSetFromOptions(A);
  MatSetUp(A);
  MatGetOwnershipRange(A, &Istart, &Iend);
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;     /* x-index of this grid point */
    j = Ii - i * n; /* y-index of this grid point */
    if (i > 0) { /* coupling to the neighbor at (i-1,j) */
      J = Ii - n;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    if (i < m - 1) { /* coupling to the neighbor at (i+1,j) */
      J = Ii + n;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    if (j > 0) { /* coupling to the neighbor at (i,j-1) */
      J = Ii - 1;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    if (j < n - 1) { /* coupling to the neighbor at (i,j+1) */
      J = Ii + 1;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    v = 4.0;
    MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES); /* diagonal entry */
  }
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);

  /*
     Create vectors.  Set the exact solution u to the vector of all ones and
     compute the corresponding right-hand side b = A*u, so that the error in
     the computed solution can be measured at the end.
  */
  MatCreateVecs(A, &u, &b);
  VecDuplicate(u, &x);
  VecSet(u, one);
  MatMult(A, u, b);

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD, &ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp, A, A);

  /*
     Set the default preconditioner for this program to be ASM
  */
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCASM);

  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition, via
         PCASMSetOverlap(pc,overlap);
     Could instead use the option -pc_asm_overlap <ovl>

     Set the total number of blocks via -pc_asm_blocks <blks>
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
  */
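
  /*
     As a concrete illustration of the basic method described above (the
     executable name ex8 is an assumption based on the usual PETSc example
     build, not something specified in this file), a single-processor run
     with several blocks and a wider overlap might look like

         ./ex8 -pc_asm_blocks 4 -pc_asm_overlap 2 -sub_pc_type ilu -ksp_monitor
  */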

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     First, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry (a sketch of one
     such hand-built decomposition is given in the comment below).

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */
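
  /*
     A minimal sketch of a hand-built decomposition (illustrative only; it is
     not used by this example).  Assuming a single process and a user-chosen
     number Nsub of horizontal strips of nearly equal height, the index sets
     could be created with ISCreateStride() and handed to the preconditioner
     with PCASMSetTotalSubdomains():

         PetscInt rows_per_sub = m / Nsub, start = 0, nrows;
         PetscMalloc1(Nsub, &is);
         for (i = 0; i < Nsub; i++) {
           nrows = (i < Nsub - 1) ? rows_per_sub : m - start;
           ISCreateStride(PETSC_COMM_SELF, nrows * n, start * n, 1, &is[i]);
           start += nrows;
         }
         PCASMSetTotalSubdomains(pc, Nsub, is, NULL);

     The strips above do not overlap; the overlap requested with
     PCASMSetOverlap() would then be generated automatically.
  */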

  if (!user_subdomains) { /* basic version */
    PCASMSetOverlap(pc, overlap);
  } else { /* advanced version */
    PCASMCreateSubdomains2D(m, n, M, N, 1, overlap, &Nsub, &is, &is_local);
    PCASMSetLocalSubdomains(pc, Nsub, is, is_local);
    flg = PETSC_FALSE;
    PetscOptionsGetBool(NULL, NULL, "-subdomain_view", &flg, NULL);
    if (flg) {
      PetscPrintf(PETSC_COMM_SELF, "Nmesh points: %" PetscInt_FMT " x %" PetscInt_FMT "; subdomain partition: %" PetscInt_FMT " x %" PetscInt_FMT "; overlap: %" PetscInt_FMT "; Nsub: %" PetscInt_FMT "\n", m, n, M, N, overlap, Nsub);
      PetscPrintf(PETSC_COMM_SELF, "IS:\n");
      for (i = 0; i < Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF, "  IS[%" PetscInt_FMT "]\n", i);
        ISView(is[i], PETSC_VIEWER_STDOUT_SELF);
      }
      PetscPrintf(PETSC_COMM_SELF, "IS_local:\n");
      for (i = 0; i < Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF, "  IS_local[%" PetscInt_FMT "]\n", i);
        ISView(is_local[i], PETSC_VIEWER_STDOUT_SELF);
      }
    }
  }

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub_ before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
     - A per-block variation of the loop used below is sketched in a
       comment following that loop.
  */

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-user_set_subdomain_solvers", &flg, NULL);
  if (flg) {
    KSP      *subksp;        /* array of KSP contexts for local subblocks */
    PetscInt  nlocal, first; /* number of local subblocks, first local subblock */
    PC        subpc;         /* PC context for subblock */
    PetscBool isasm;

    PetscPrintf(PETSC_COMM_WORLD, "User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if the PC type has been changed from ASM by the runtime
       options, since the calls below rely on the ASM interface.
    */
    PetscObjectTypeCompare((PetscObject)pc, PCASM, &isasm);
    PetscCheck(isasm, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Cannot change the PC type when manually setting the subdomain solvers");

    /*
       Call KSPSetUp() to set up the ASM data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCASMGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCASMGetSubKSP(pc, &nlocal, &first, &subksp);

    /*
       Loop over the local blocks, setting various KSP options for each
       block.  Here the same choice (ILU-preconditioned GMRES with a
       tightened relative tolerance) is applied to every block.
    */
    for (i = 0; i < nlocal; i++) {
      KSPGetPC(subksp[i], &subpc);
      PCSetType(subpc, PCILU);
      KSPSetType(subksp[i], KSPGMRES);
      KSPSetTolerances(subksp[i], 1.e-7, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT);
    }
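
    /*
       Illustrative variation (not exercised by this example): since each
       block's KSP is independent, different blocks can be given different
       solvers, e.g. a direct LU solve on the first local block and
       ILU-preconditioned GMRES on the rest:

           for (i = 0; i < nlocal; i++) {
             KSPGetPC(subksp[i], &subpc);
             if (i == 0) {
               PCSetType(subpc, PCLU);
               KSPSetType(subksp[i], KSPPREONLY);
             } else {
               PCSetType(subpc, PCILU);
               KSPSetType(subksp[i], KSPGMRES);
             }
           }

       Running with -ksp_view shows the solver actually used on each block.
    */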
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSolve(ksp, b, x);

  /* -------------------------------------------------------------------
                      Compare result to the exact solution
     ------------------------------------------------------------------- */
  VecAXPY(x, -1.0, u); /* x <- x - u, the error in the computed solution */
  VecNorm(x, NORM_INFINITY, &e);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-print_error", &flg, NULL);
  if (flg) PetscPrintf(PETSC_COMM_WORLD, "Infinity norm of the error: %g\n", (double)e);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */

  if (user_subdomains) {
    for (i = 0; i < Nsub; i++) {
      ISDestroy(&is[i]);
      ISDestroy(&is_local[i]);
    }
    PetscFree(is);
    PetscFree(is_local);
  }
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      suffix: 1
      args: -print_error

TEST*/