Actual source code: ex25.c

static char help[] = "Takes a patch of a large DMDA vector to one process.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmpatch.h>
#include <petscsf.h>

typedef struct {
  PetscScalar x, y;
} Field;
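/* With dof = 2, each DMDA grid point stores two interlaced PetscScalars; DMDAVecGetArray() below casts the vector storage to Field **, so the entries can be addressed as .x and .y */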

int main(int argc, char **argv)
{
  Vec         xy, sxy;
  DM          da, sda = NULL;
  PetscSF     sf;
  PetscInt    m = 10, n = 10, dof = 2;
  MatStencil  lower = {0, 3, 2, 0}, upper = {0, 7, 8, 0}; /* These are given in the order z, y, x of the logical coordinates (the k, j, i members of MatStencil); the fourth entry, the dof component, is ignored here */
  MPI_Comm    comm;
  PetscMPIInt rank;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));

  /* Create the large DMDA and set coordinates (which we will copy down to the small DA) */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, m, n, PETSC_DECIDE, PETSC_DECIDE, dof, 1, 0, 0, &da));
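  /* DMSetFromOptions() allows command line options such as -da_grid_x and -da_grid_y to override the grid sizes chosen above */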
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
  /* Just as a simple example we use the coordinates as the variables in the vectors we wish to examine */
  PetscCall(DMGetCoordinates(da, &xy));
  /* The vector entries are displayed in the "natural" ordering on the two dimensional grid: interlaced x and y, with the x variable increasing more rapidly than the y */
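  /* Passing 0 (NULL) as the viewer selects the default ASCII viewer on the vector's communicator */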
  PetscCall(VecView(xy, 0));

  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  if (rank == 0) comm = MPI_COMM_SELF;
  else comm = MPI_COMM_NULL;
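  /* Only rank 0 passes a real communicator, so only rank 0 receives the zoomed patch DM below; on the other ranks sda remains NULL */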

  PetscCall(DMPatchZoom(da, lower, upper, comm, &sda, NULL, &sf));
  if (rank == 0) {
    PetscCall(DMCreateGlobalVector(sda, &sxy));
  } else {
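    /* The scatter below is collective, so the remaining ranks still need a destination vector, here of length zero */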
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, 0, &sxy));
  }
  /* A PetscSF can also be used as a VecScatter context */
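  /* (in recent PETSc versions, VecScatter is a typedef of PetscSF) */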
  PetscCall(VecScatterBegin(sf, xy, sxy, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(sf, xy, sxy, INSERT_VALUES, SCATTER_FORWARD));
  /* Only rank == 0 has the entries of the patch, so run the examination code only on that rank */
  if (rank == 0) {
    Field       **vars;
    DMDALocalInfo info;
    PetscInt      i, j;
    PetscScalar   sum = 0;

    /* The vector entries of the patch are displayed in the "natural" ordering on the two dimensional grid: interlaced x and y, with the x variable increasing more rapidly */
    PetscCall(VecView(sxy, PETSC_VIEWER_STDOUT_SELF));
    /* Compute some trivial statistic of the coordinates */
    PetscCall(DMDAGetLocalInfo(sda, &info));
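    /* info.xs/info.ys are the starting indices of this process's portion of the patch and info.xm/info.ym its widths */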
    PetscCall(DMDAVecGetArray(sda, sxy, &vars));
    /* Loop over the patch, which is a piece of the entire domain */
    for (j = info.ys; j < info.ys + info.ym; j++) {
      for (i = info.xs; i < info.xs + info.xm; i++) sum += vars[j][i].x;
    }
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "The sum of the x coordinates is %g\n", (double)PetscRealPart(sum)));
    PetscCall(DMDAVecRestoreArray(sda, sxy, &vars));
  }

  PetscCall(VecDestroy(&sxy));
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(DMDestroy(&sda));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
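/* Run with, for example, mpiexec -n 4 ./ex25; the TEST stanzas below exercise the example on 1 and 4 MPI processes */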

/*TEST

   test:

   test:
     nsize: 4
     suffix: 2

TEST*/