Actual source code: ex8.c

static char help[] = "Demonstrates using a local ordering to set values into a parallel vector.\n\n";

/*
  Include "petscvec.h" so that we can use vectors.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscMPIInt rank;
  PetscInt    i, ng, *gindices, rstart, rend, M;
  PetscScalar one = 1.0;
  Vec         x;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  /*
     Create a parallel vector.
      - In this case, we specify the size of each processor's local
        portion, and PETSc computes the global size.  Alternatively,
        PETSc could determine the vector's distribution if we specify
        just the global size (a sketch of that alternative follows the
        calls below).
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecSetSizes(x, rank + 1, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSet(x, one));
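
  /*
     A sketch of the alternative mentioned above (added for illustration; the
     value 20 is an arbitrary example, not part of the original program): one
     could instead give only the global size and let PETSc choose each local
     portion, e.g.

       PetscCall(VecSetSizes(x, PETSC_DECIDE, 20));
  */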

  /*
     Set the local-to-global ordering for the vector. Each processor
     generates a list of the global indices for each local index. Note that
     the local indices are just whatever is convenient for a particular application.
     In this case we treat the vector as lying on a one-dimensional grid and
     have one ghost point on each end of the block owned by each processor.
  */

  PetscCall(VecGetSize(x, &M));
  PetscCall(VecGetOwnershipRange(x, &rstart, &rend));
  ng = rend - rstart + 2;
  PetscCall(PetscMalloc1(ng, &gindices));
  gindices[0] = rstart - 1;
  for (i = 0; i < ng - 1; i++) gindices[i + 1] = gindices[i] + 1;
  /* map the first and last point as periodic */
  if (gindices[0] == -1) gindices[0] = M - 1;
  if (gindices[ng - 1] == M) gindices[ng - 1] = 0;
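
  /*
     A small worked illustration (added for clarity, not part of the original
     source): with 2 MPI ranks the local sizes are 1 and 2, so M = 3.  Rank 0
     owns entry 0 and builds gindices = {-1, 0, 1}, which the periodic wrap
     above turns into {2, 0, 1}; rank 1 owns entries 1 and 2 and builds
     {0, 1, 2, 3}, which becomes {0, 1, 2, 0}.
  */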
  {
    ISLocalToGlobalMapping ltog;
    PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, ng, gindices, PETSC_COPY_VALUES, &ltog));
    PetscCall(VecSetLocalToGlobalMapping(x, ltog));
    PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  }
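  /* the mapping attached above is what lets VecSetValuesLocal() below translate
     local indices into global ones before the values are communicated */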
  PetscCall(PetscFree(gindices));

  /*
     Set the vector elements.
      - In this case we set the values using the local ordering.
      - Each processor can contribute any vector entries,
        regardless of which processor "owns" them; any nonlocal
        contributions will be transferred to the appropriate processor
        during the assembly process.
      - In this example, the flag ADD_VALUES indicates that all
        contributions will be added together.
  */
  for (i = 0; i < ng; i++) PetscCall(VecSetValuesLocal(x, 1, &i, &one, ADD_VALUES));
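
  /*
     Added note: the loop above adds 1.0 at every local index, including the two
     ghost indices, so on top of the initial 1.0 from VecSet() each owned entry
     receives one contribution from its owner plus one from every neighboring
     rank that carries it as a ghost point; the ghost contributions are routed
     to the owning rank during the assembly below.  A roughly equivalent call
     using global indices would be

       PetscCall(VecSetValues(x, 1, &gidx, &one, ADD_VALUES));

     where gidx would hold the global index that the mapping assigns to local
     index i (gidx is a hypothetical variable named here only for illustration).
  */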

  /*
     Assemble vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  PetscCall(VecAssemblyBegin(x));
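  /* added note: purely local computation that does not touch x could be placed
     here to overlap work with the communication started by VecAssemblyBegin() */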
  PetscCall(VecAssemblyEnd(x));

  /*
     View the vector; then destroy it.
  */
  PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&x));

  PetscCall(PetscFinalize());
  return 0;
}
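
/*
   Added usage note: the TEST block below runs this example on 4 MPI ranks; a
   typical manual invocation would be

     mpiexec -n 4 ./ex8

   where the launcher name and the path to the executable may differ by system.
*/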

/*TEST

     test:
       nsize: 4

TEST*/