Actual source code: ex64.c

/*
 *
 *  Created on: Aug 10, 2015
 *      Author: Fande Kong  <fdkong.jd@gmail.com>
 */

static char help[] = "Illustrates use of the GASM preconditioner with hierarchical partitioning and MatIncreaseOverlapSplit().\n"
                     "Relevant options include:\n"
                     "        -pc_gasm_total_subdomains\n"
                     "        -pc_gasm_print_subdomains\n\n";

/*
   Note:  This example focuses on setting the subdomains for the GASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The GASM preconditioner is fully parallel.  The user-space routine
   PCGASMCreateSubdomains2D() that computes the domain decomposition is also
   parallel and attempts to generate both subdomains that straddle processes
   and multiple subdomains per process.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the GASM preconditioner.
*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines   petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include <petscksp.h>
#include <petscmat.h>

int main(int argc, char **args)
{
  Vec             x, b, u; /* approx solution, RHS, exact solution */
  Mat             A, perA; /* linear system matrix and its repartitioned form */
  KSP             ksp;     /* linear solver context */
  PC              pc;      /* PC context */
  PetscInt        overlap; /* width of subdomain overlap */
  PetscInt        m, n;    /* mesh dimensions in x- and y- directions */
  PetscInt        M, N;    /* number of subdomains in x- and y- directions */
  PetscInt        i, j, Ii, J, Istart, Iend;
  PetscMPIInt     size;
  PetscBool       flg;
  PetscBool       user_set_subdomains;
  PetscScalar     v, one = 1.0;
  MatPartitioning part;
  IS              coarseparts, fineparts;
  IS              is, isn, isrows;
  MPI_Comm        comm;
  PetscReal       e;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "ex64", "PC");
  m = 15;
  PetscCall(PetscOptionsInt("-M", "Number of mesh points in the x-direction", "PCGASMCreateSubdomains2D", m, &m, NULL));
  n = 17;
  PetscCall(PetscOptionsInt("-N", "Number of mesh points in the y-direction", "PCGASMCreateSubdomains2D", n, &n, NULL));
  user_set_subdomains = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-user_set_subdomains", "Use the user-specified 2D tiling of mesh by subdomains", "PCGASMCreateSubdomains2D", user_set_subdomains, &user_set_subdomains, NULL));
  M = 2;
  PetscCall(PetscOptionsInt("-Mdomains", "Number of subdomain tiles in the x-direction", "PCGASMCreateSubdomains2D", M, &M, NULL));
  N = 1;
  PetscCall(PetscOptionsInt("-Ndomains", "Number of subdomain tiles in the y-direction", "PCGASMCreateSubdomains2D", N, &N, NULL));
  overlap = 1;
  PetscCall(PetscOptionsInt("-overlap", "Size of tile overlap.", "PCGASMCreateSubdomains2D", overlap, &overlap, NULL));
  PetscOptionsEnd();

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five point stencil, YET AGAIN
  */
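  /*
     Row Ii of A corresponds to grid point (i,j) with Ii = i*n + j (row-major over
     the m x n grid); each row gets the standard 5-point stencil: 4 on the diagonal
     and -1 for every existing north/south/east/west neighbor.
  */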
  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (i < m - 1) {
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j > 0) {
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j < n - 1) {
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    v = 4.0;
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
    Partition the graph of the matrix
  */
  PetscCall(MatPartitioningCreate(comm, &part));
  PetscCall(MatPartitioningSetAdjacency(part, A));
  PetscCall(MatPartitioningSetType(part, MATPARTITIONINGHIERARCH));
  PetscCall(MatPartitioningHierarchicalSetNcoarseparts(part, 2));
  PetscCall(MatPartitioningHierarchicalSetNfineparts(part, 2));
  PetscCall(MatPartitioningSetFromOptions(part));
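  /*
     The hierarchical partitioner first splits the graph into 2 coarse parts and
     then refines each coarse part into 2 fine parts, matching the 4 MPI ranks
     used by the test below; coarse parts typically correspond to compute nodes
     and fine parts to the processes within a node.
  */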
  /* get the new owning process of each vertex */
  PetscCall(MatPartitioningApply(part, &is));
  /* get the coarse parts, according to which we rearrange the matrix */
  PetscCall(MatPartitioningHierarchicalGetCoarseparts(part, &coarseparts));
  /* the fine parts are used together with the coarse parts */
  PetscCall(MatPartitioningHierarchicalGetFineparts(part, &fineparts));
  /* get the new global number of each old global number */
  PetscCall(ISPartitioningToNumbering(is, &isn));
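  /* gather, on each process, the old global row indices it will own under the
     new partition; these rows define the permuted matrix extracted next */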
  PetscCall(ISBuildTwoSided(is, NULL, &isrows));
  PetscCall(MatCreateSubMatrix(A, isrows, isrows, MAT_INITIAL_MATRIX, &perA));
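  /*
     The help string mentions MatIncreaseOverlapSplit(), which enlarges index sets
     that may live on subcommunicators of the matrix communicator, so that a single
     overlapping subdomain can straddle several processes.  It is not called
     directly in this example; a minimal sketch of a direct call, assuming it
     modifies the index set in place like MatIncreaseOverlap(), would be:

       IS ovl;
       PetscCall(ISDuplicate(isrows, &ovl));
       PetscCall(MatIncreaseOverlapSplit(perA, 1, &ovl, 1));
       PetscCall(ISDestroy(&ovl));
  */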
  PetscCall(MatCreateVecs(perA, &b, NULL));
  PetscCall(VecSetFromOptions(b));
  PetscCall(VecDuplicate(b, &u));
  PetscCall(VecDuplicate(b, &x));
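  /* choose the exact solution u = 1 and build the right-hand side b = perA*u */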
  PetscCall(VecSet(u, one));
  PetscCall(MatMult(perA, u, b));
  PetscCall(KSPCreate(comm, &ksp));
  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, perA, perA));

  /*
     Set the default preconditioner for this program to be GASM
  */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCGASM));
  PetscCall(KSPSetFromOptions(ksp));
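  /*
     The options -user_set_subdomains, -Mdomains, -Ndomains and -overlap parsed
     above are not otherwise used in this file; the subdomain layout is left to
     PCGASM (e.g. via -pc_gasm_total_subdomains).  A minimal sketch of how an
     explicit M x N tiling could be supplied instead, assuming the argument
     order of PCGASMCreateSubdomains2D() (consult its manual page before use):

       if (user_set_subdomains) {
         IS      *inner, *outer;
         PetscInt nsub;
         PetscCall(PCGASMCreateSubdomains2D(pc, m, n, M, N, 1, overlap, &nsub, &inner, &outer));
         PetscCall(PCGASMSetSubdomains(pc, nsub, inner, outer));
       }
  */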
  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  PetscCall(KSPSolve(ksp, b, x));
  /* -------------------------------------------------------------------
                      Compare result to the exact solution
     ------------------------------------------------------------------- */
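  /* overwrite x with the error x - u and take its infinity norm */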
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_INFINITY, &e));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-print_error", &flg, NULL));
  if (flg) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Infinity norm of the error: %g\n", (double)e));
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&perA));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&coarseparts));
  PetscCall(ISDestroy(&fineparts));
  PetscCall(ISDestroy(&isrows));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatPartitioningDestroy(&part));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 4
      requires: superlu_dist parmetis
      args: -ksp_monitor -pc_gasm_overlap 1 -sub_pc_type lu -sub_pc_factor_mat_solver_type superlu_dist -pc_gasm_total_subdomains 2

TEST*/