Actual source code: ex51.c

static char help[] = "Tests DMDAVecGetArrayDOF()\n";
#include <petscdm.h>
#include <petscdmda.h>

int main(int argc, char *argv[])
{
  DM              da, daX, daY;
  DMDALocalInfo   info;
  MPI_Comm        commX, commY;
  Vec             basisX, basisY;
  PetscScalar   **arrayX, **arrayY;
  const PetscInt *lx, *ly;
  PetscInt        M = 3, N = 3;                /* global grid size of the 2D DMDA */
  PetscInt        p     = 1;                   /* polynomial degree */
  PetscInt        numGP = 3;                   /* number of Gauss points */
  PetscInt        dof   = 2 * (p + 1) * numGP; /* dof per grid point of the 1D basis DMDAs */
  PetscMPIInt     rank, subsize, subrank;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, 0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  /* Create 2D DMDA */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, M, N, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  /* Create 1D DMDAs along two directions. */
  PetscCall(DMDAGetOwnershipRanges(da, &lx, &ly, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  /* Partitioning in the X direction makes a subcomm extending in the Y direction and vice-versa. */
  PetscCall(DMDAGetProcessorSubsets(da, DM_X, &commY));
  PetscCall(DMDAGetProcessorSubsets(da, DM_Y, &commX));
  PetscCallMPI(MPI_Comm_size(commX, &subsize));
  PetscCallMPI(MPI_Comm_rank(commX, &subrank));
  PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d]X subrank: %d subsize: %d\n", rank, subrank, subsize));
  PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
  PetscCallMPI(MPI_Comm_size(commY, &subsize));
  PetscCallMPI(MPI_Comm_rank(commY, &subrank));
  PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d]Y subrank: %d subsize: %d\n", rank, subrank, subsize));
  PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
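  /* Each 1D DMDA lives on the matching subcommunicator and reuses the 2D DMDA's
     ownership ranges (lx, ly), so its parallel decomposition lines up with the
     corresponding direction of the 2D grid. */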
  PetscCall(DMDACreate1d(commX, DM_BOUNDARY_NONE, info.mx, dof, 1, lx, &daX));
  PetscCall(DMSetUp(daX));
  PetscCall(DMDACreate1d(commY, DM_BOUNDARY_NONE, info.my, dof, 1, ly, &daY));
  PetscCall(DMSetUp(daY));
  /* Create 1D vectors for basis functions */
  PetscCall(DMGetGlobalVector(daX, &basisX));
  PetscCall(DMGetGlobalVector(daY, &basisY));
  /* Extract basis functions */
  PetscCall(DMDAVecGetArrayDOF(daX, basisX, &arrayX));
  PetscCall(DMDAVecGetArrayDOF(daY, basisY, &arrayY));
  /* arrayX[i][d] is dof d of basisX at global grid point i of daX (within the local ownership range) */
  /* arrayY[j][d] is dof d of basisY at global grid point j of daY */
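  /* A minimal sketch of how the basis values might be filled: loop over this
     process's ownership range of daX and set each dof (zeroed here as a placeholder). */
  {
    PetscInt xs, xm, i, d;
    PetscCall(DMDAGetCorners(daX, &xs, NULL, NULL, &xm, NULL, NULL));
    for (i = xs; i < xs + xm; ++i) {
      for (d = 0; d < dof; ++d) arrayX[i][d] = 0.0;
    }
  }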
  PetscCall(DMDAVecRestoreArrayDOF(daX, basisX, &arrayX));
  PetscCall(DMDAVecRestoreArrayDOF(daY, basisY, &arrayY));
  /* Return basis vectors */
  PetscCall(DMRestoreGlobalVector(daX, &basisX));
  PetscCall(DMRestoreGlobalVector(daY, &basisY));
  /* Cleanup */
  PetscCallMPI(MPI_Comm_free(&commX));
  PetscCallMPI(MPI_Comm_free(&commY));
  PetscCall(DMDestroy(&daX));
  PetscCall(DMDestroy(&daY));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 2

TEST*/