Actual source code: ex14.c

static char help[] = "Tests DMCreateDomainDecomposition.\n\n";

/*
Use the options
     -da_grid_x <nx> - number of grid points in x direction, if M < 0
     -da_grid_y <ny> - number of grid points in y direction, if N < 0
     -da_processors_x <MX> - number of processors in x direction
     -da_processors_y <MY> - number of processors in y direction
*/
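/* A hypothetical example invocation (the -dim and -dof options are parsed
   below; the -da_* options are handled by DMSetFromOptions()):
     mpiexec -n 4 ./ex14 -dim 2 -dof 3 -da_grid_x 65 -da_grid_y 65
*/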

#include <petscdm.h>
#include <petscdmda.h>

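/* Fill each locally owned grid point of gvec with identifying values: its
   (i, j, k) grid indices in 3D, or its (i, j) indices plus the owning rank
   in 2D. Both branches write three components per point, so dof >= 3 is
   assumed (the default dof below is 3). */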
PetscErrorCode FillLocalSubdomain(DM da, Vec gvec)
{
  DMDALocalInfo info;
  PetscMPIInt   rank;
  PetscInt      i, j, k;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(DMDAGetLocalInfo(da, &info));

  if (info.dim == 3) {
    PetscScalar ***g;
    PetscCall(DMDAVecGetArray(da, gvec, &g));
    /* loop over the locally owned grid points */
    for (k = info.zs; k < info.zs + info.zm; k++) {
      for (j = info.ys; j < info.ys + info.ym; j++) {
        for (i = info.xs; i < info.xs + info.xm; i++) {
          g[k][j][info.dof * i + 0] = i;
          g[k][j][info.dof * i + 1] = j;
          g[k][j][info.dof * i + 2] = k;
        }
      }
    }
    PetscCall(DMDAVecRestoreArray(da, gvec, &g));
  }
  if (info.dim == 2) {
    PetscScalar **g;
    PetscCall(DMDAVecGetArray(da, gvec, &g));
    /* loop over the locally owned grid points */
    for (j = info.ys; j < info.ys + info.ym; j++) {
      for (i = info.xs; i < info.xs + info.xm; i++) {
        g[j][info.dof * i + 0] = i;
        g[j][info.dof * i + 1] = j;
        g[j][info.dof * i + 2] = rank;
      }
    }
    PetscCall(DMDAVecRestoreArray(da, gvec, &g));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  DM            da, *subda;
  PetscInt      i, dim = 3, dof = 3;
  PetscInt      M = 25, N = 25, P = 25;
  PetscMPIInt   size, rank;
  Vec           v;
  Vec           slvec, sgvec;
  IS           *ois, *iis;
  VecScatter    oscata;
  VecScatter   *iscat, *oscat, *gscat;
  DMDALocalInfo info;
  PetscBool     patchis_offproc = PETSC_TRUE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-dim", &dim, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-dof", &dof, NULL));

  /* Create distributed array and get vectors */
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  if (dim == 2) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, M, N, PETSC_DECIDE, PETSC_DECIDE, dof, 1, NULL, NULL, &da));
  } else if (dim == 3) {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, M, N, P, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, dof, 1, NULL, NULL, NULL, &da));
  }
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDAGetLocalInfo(da, &info));

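  /* Decompose the DMDA into one subdomain per rank: iis/ois hold the index
     sets of the inner (non-overlapping) and outer (overlapping) subdomain
     entries in the global vector, and the scatters move global-vector data
     to the subdomain interior (iscat), overlap (oscat), and ghosted local
     form (gscat). */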
  PetscCall(DMCreateDomainDecomposition(da, NULL, NULL, &iis, &ois, &subda));
  PetscCall(DMCreateDomainDecompositionScatters(da, 1, subda, &iscat, &oscat, &gscat));

  /* TODO: broken for dim=2 */
  if (dim == 3) {
    DMDALocalInfo subinfo;
    MatStencil    lower, upper;
    IS            patchis;
    Vec           smallvec;
    Vec           largevec;
    VecScatter    patchscat;

    PetscCall(DMDAGetLocalInfo(subda[0], &subinfo));

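    /* bounding box of this rank's owned portion of the grid; the upper
       corner is exclusive */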
    lower.i = info.xs;
    lower.j = info.ys;
    lower.k = info.zs;
    upper.i = info.xs + info.xm;
    upper.j = info.ys + info.ym;
    upper.k = info.zs + info.zm;

    /* test the patch IS as a thing to scatter to/from */
    PetscCall(DMDACreatePatchIS(da, &lower, &upper, &patchis, patchis_offproc));
    PetscCall(DMGetGlobalVector(da, &largevec));

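    /* a sequential vector sized to hold all dof components of the patch */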
    PetscCall(VecCreate(PETSC_COMM_SELF, &smallvec));
    PetscCall(VecSetSizes(smallvec, info.dof * (upper.i - lower.i) * (upper.j - lower.j) * (upper.k - lower.k), PETSC_DECIDE));
    PetscCall(VecSetFromOptions(smallvec));
    PetscCall(VecScatterCreate(smallvec, NULL, largevec, patchis, &patchscat));

    PetscCall(FillLocalSubdomain(subda[0], smallvec));
    PetscCall(VecSet(largevec, 0));

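    /* accumulate the patch contents into the zeroed global vector, then
       view the index set and scatter that were used */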
    PetscCall(VecScatterBegin(patchscat, smallvec, largevec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(patchscat, smallvec, largevec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(ISView(patchis, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(VecScatterView(patchscat, PETSC_VIEWER_STDOUT_WORLD));

    for (i = 0; i < size; i++) {
      if (i == rank) PetscCall(VecView(smallvec, PETSC_VIEWER_STDOUT_SELF));
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    PetscCall(VecView(largevec, PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(VecDestroy(&smallvec));
    PetscCall(DMRestoreGlobalVector(da, &largevec));
    PetscCall(ISDestroy(&patchis));
    PetscCall(VecScatterDestroy(&patchscat));
  }

  /* view the various parts */
  {
    for (i = 0; i < size; i++) {
      if (i == rank) {
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "Processor %d: \n", i));
        PetscCall(DMView(subda[0], PETSC_VIEWER_STDOUT_SELF));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

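    /* vectors on the subdomain (ghosted local and global) and on the full DM */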
    PetscCall(DMGetLocalVector(subda[0], &slvec));
    PetscCall(DMGetGlobalVector(subda[0], &sgvec));
    PetscCall(DMGetGlobalVector(da, &v));

    /* test filling the outer (overlap) region between the big DM and the small ones with the IS scatter */
    PetscCall(VecScatterCreate(v, ois[0], sgvec, NULL, &oscata));

    PetscCall(FillLocalSubdomain(subda[0], sgvec));

    PetscCall(VecScatterBegin(oscata, sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscata, sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    /* test the local-to-local scatter */

    /* fill up the local subdomain and then add them together */
    PetscCall(FillLocalSubdomain(da, v));

    PetscCall(VecScatterBegin(gscat[0], v, slvec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(gscat[0], v, slvec, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test ghost scattering backwards */

    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(gscat[0], slvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(gscat[0], slvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test overlap scattering backwards */

    PetscCall(DMLocalToGlobalBegin(subda[0], slvec, ADD_VALUES, sgvec));
    PetscCall(DMLocalToGlobalEnd(subda[0], slvec, ADD_VALUES, sgvec));

    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(oscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test interior scattering backwards */

    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(iscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(iscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test matrix allocation */
    for (i = 0; i < size; i++) {
      if (i == rank) {
        Mat m;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "Processor %d: \n", i));
        PetscCall(DMSetMatType(subda[0], MATAIJ));
        PetscCall(DMCreateMatrix(subda[0], &m));
        PetscCall(MatView(m, PETSC_VIEWER_STDOUT_SELF));
        PetscCall(MatDestroy(&m));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }
    PetscCall(DMRestoreLocalVector(subda[0], &slvec));
    PetscCall(DMRestoreGlobalVector(subda[0], &sgvec));
    PetscCall(DMRestoreGlobalVector(da, &v));
  }

  PetscCall(DMDestroy(&subda[0]));
  PetscCall(ISDestroy(&ois[0]));
  PetscCall(ISDestroy(&iis[0]));

  PetscCall(VecScatterDestroy(&iscat[0]));
  PetscCall(VecScatterDestroy(&oscat[0]));
  PetscCall(VecScatterDestroy(&gscat[0]));
  PetscCall(VecScatterDestroy(&oscata));

  PetscCall(PetscFree(iscat));
  PetscCall(PetscFree(oscat));
  PetscCall(PetscFree(gscat));

  PetscCall(PetscFree(subda));
  PetscCall(PetscFree(ois));
  PetscCall(PetscFree(iis));

  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}