Actual source code: ex9.c

static char help[] = "Demonstrates use of VecCreateGhost().\n\n";

/*
   Description: Ghost padding is one way to handle local calculations that
      involve values from other processors. VecCreateGhost() provides
      a way to create vectors with extra room at the end of the vector
      array to contain the needed ghost values from other processors;
      vector computations are otherwise unaffected.
*/
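
/*
   A typical ghosted-vector workflow, sketched with the calls this example uses
   below (the names nlocal, nghost, ifrom, gx, and lx match those in main()):

      VecCreateGhost(PETSC_COMM_WORLD, nlocal, PETSC_DECIDE, nghost, ifrom, &gx);
      ... set and assemble the owned entries of gx ...
      VecGhostUpdateBegin(gx, INSERT_VALUES, SCATTER_FORWARD);
      VecGhostUpdateEnd(gx, INSERT_VALUES, SCATTER_FORWARD);
      VecGhostGetLocalForm(gx, &lx);
      ... local computation reading owned and ghost values through lx ...
      VecGhostRestoreLocalForm(gx, &lx);
*/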

/*
  Include "petscvec.h" so that we can use vectors.  Note that this file
  automatically includes:
     petscsys.h       - base PETSc routines   petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscMPIInt            rank, size;
  PetscInt               nlocal = 6, nghost = 2, ifrom[2], i, rstart, rend;
  PetscBool              flg, flg2, flg3, flg4, flg5;
  PetscScalar            value, *array, *tarray = NULL;
  Vec                    lx, gx, gxs;
  IS                     ghost;
  ISLocalToGlobalMapping mapping;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Must run example with two processors");

  /*
     Construct a graph connecting the nlocal degrees of
     freedom per processor. From this we will generate the global
     indices of needed ghost values.

     For simplicity we generate the entire graph on each processor:
     in a real application the graph would be stored in parallel, but this
     example is only intended to demonstrate the management of ghost padding
     with VecCreateGhost().

     In this example we consider the vector as representing
     degrees of freedom in a one dimensional grid with periodic
     boundary conditions.

        ----Processor  0---------  ----Processor 1 --------
         0    1   2   3   4    5    6    7   8   9   10   11
                               |----|
         |-------------------------------------------------|

  */
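
  /*
     With this periodic grid, rank 0 (owning global entries 0-5) needs ghost
     values at global indices 11 (the periodic left neighbor of 0) and 6 (the
     right neighbor of 5); rank 1 (owning 6-11) needs ghosts at 0 and 5.
  */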

  if (rank == 0) {
    ifrom[0] = 11;
    ifrom[1] = 6;
  } else {
    ifrom[0] = 0;
    ifrom[1] = 5;
  }

  /*
     Create the vector with two slots for ghost points. Note that both
     the local vector (lx) and the global vector (gx) share the same
     array for storing vector values.
  */
  PetscCall(PetscOptionsHasName(NULL, NULL, "-allocate", &flg));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-vecmpisetghost", &flg2));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-minvalues", &flg3));
  if (flg) {
    PetscCall(PetscMalloc1(nlocal + nghost, &tarray));
    PetscCall(VecCreateGhostWithArray(PETSC_COMM_WORLD, nlocal, PETSC_DECIDE, nghost, ifrom, tarray, &gxs));
  } else if (flg2) {
    PetscCall(VecCreate(PETSC_COMM_WORLD, &gxs));
    PetscCall(VecSetType(gxs, VECMPI));
    PetscCall(VecSetSizes(gxs, nlocal, PETSC_DECIDE));
    PetscCall(VecMPISetGhost(gxs, nghost, ifrom));
  } else {
    PetscCall(VecCreateGhost(PETSC_COMM_WORLD, nlocal, PETSC_DECIDE, nghost, ifrom, &gxs));
    PetscCall(VecSet(gxs, 1.0));
    if (rank == 1) PetscCall(VecSetValueLocal(gxs, 0, 2.0, INSERT_VALUES));
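    /* VecSetValueLocal() uses the vector's local-to-global mapping: on rank 1
       local index 0 maps to global index 6, since rank 1 owns global entries
       6-11. The reduction below checks that the value 2.0 arrived there. */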
    PetscCall(VecAssemblyBegin(gxs));
    PetscCall(VecAssemblyEnd(gxs));
    value = 0.0;
    if (rank == 1) {
      PetscCall(VecGetArray(gxs, &array));
      value = array[0];
      PetscCall(VecRestoreArray(gxs, &array));
    }
    PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &value, 1, MPIU_SCALAR, MPIU_SUM, PETSC_COMM_WORLD));
    PetscCheck(PetscIsCloseAtTolScalar(value, 2.0, PETSC_SMALL, PETSC_SMALL), PETSC_COMM_WORLD, PETSC_ERR_PLIB, "%g != 2.0", (double)PetscAbsScalar(value));
  }

  /*
      Test VecDuplicate()
  */
  PetscCall(VecDuplicate(gxs, &gx));
  PetscCall(VecDestroy(&gxs));

  /*
     Access the local representation
  */
  PetscCall(VecGhostGetLocalForm(gx, &lx));
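  /* lx is a sequential vector of length nlocal + nghost that shares its array
     with gx: entries 0..nlocal-1 are the locally owned values and the last
     nghost entries are the ghost slots. */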

  /*
     Set the values 0 through 11 into the "global" vector
  */
  PetscCall(VecGetOwnershipRange(gx, &rstart, &rend));
  for (i = rstart; i < rend; i++) {
    value = (PetscScalar)i;
    PetscCall(VecSetValues(gx, 1, &i, &value, INSERT_VALUES));
  }
  PetscCall(VecAssemblyBegin(gx));
  PetscCall(VecAssemblyEnd(gx));
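
  /*
     The forward scatter with INSERT_VALUES copies each needed ghost value from
     the process that owns it into the ghost slots at the end of the local form.
  */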

  PetscCall(VecGhostUpdateBegin(gx, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(gx, INSERT_VALUES, SCATTER_FORWARD));

  /*
     Print out each vector, including the ghost padding region.
  */
  PetscCall(VecGetArray(lx, &array));
  for (i = 0; i < nlocal + nghost; i++) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " %g\n", i, (double)PetscRealPart(array[i])));
  PetscCall(VecRestoreArray(lx, &array));
  PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
  PetscCall(VecGhostRestoreLocalForm(gx, &lx));

  /* Another test that sets ghost values and then accumulates onto the owning processors using MIN_VALUES */
  if (flg3) {
    if (rank == 0) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\nTesting VecGhostUpdate with MIN_VALUES\n"));
    PetscCall(VecGhostGetLocalForm(gx, &lx));
    PetscCall(VecGetArray(lx, &array));
    for (i = 0; i < nghost; i++) array[nlocal + i] = rank ? (PetscScalar)4 : (PetscScalar)8;
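    /* rank 0 writes 8 into its ghost slots (global indices 11 and 6); rank 1
       writes 4 into its ghost slots (global indices 0 and 5) */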
    PetscCall(VecRestoreArray(lx, &array));
    PetscCall(VecGhostRestoreLocalForm(gx, &lx));
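
    /*
       The reverse scatter below sends each ghost value back to the process that
       owns the corresponding global entry; with MIN_VALUES each owner keeps the
       minimum of its current value and the incoming ghost contributions.
    */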

    PetscCall(VecGhostUpdateBegin(gx, MIN_VALUES, SCATTER_REVERSE));
    PetscCall(VecGhostUpdateEnd(gx, MIN_VALUES, SCATTER_REVERSE));

    PetscCall(VecGhostGetLocalForm(gx, &lx));
    PetscCall(VecGetArray(lx, &array));

    for (i = 0; i < nlocal + nghost; i++) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " %g\n", i, (double)PetscRealPart(array[i])));
    PetscCall(VecRestoreArray(lx, &array));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
    PetscCall(VecGhostRestoreLocalForm(gx, &lx));
  }

  PetscCall(PetscOptionsHasName(NULL, NULL, "-vecghostgetghostis", &flg4));
  if (flg4) {
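    /* VecGhostGetGhostIS() returns an index set containing the global indices
       of this process's ghost entries (the ifrom values given at creation) */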
    PetscCall(VecGhostGetGhostIS(gx, &ghost));
    PetscCall(ISView(ghost, PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscCall(PetscOptionsHasName(NULL, NULL, "-getgtlmapping", &flg5));
  if (flg5) {
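    /* the local-to-global mapping of a ghosted vector maps local indices
       0..nlocal-1 to the owned global range and the trailing nghost indices to
       the ghost (ifrom) entries; VecSetValueLocal() above relies on it */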
    PetscCall(VecGetLocalToGlobalMapping(gx, &mapping));
    if (rank == 0) PetscCall(ISLocalToGlobalMappingView(mapping, NULL));
  }

  PetscCall(VecDestroy(&gx));

  if (flg) PetscCall(PetscFree(tarray));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

     test:
       nsize: 2

     test:
       suffix: 2
       nsize: 2
       args: -allocate
       output_file: output/ex9_1.out

     test:
       suffix: 3
       nsize: 2
       args: -vecmpisetghost
       output_file: output/ex9_1.out

     test:
       suffix: 4
       nsize: 2
       args: -minvalues
       output_file: output/ex9_2.out
       requires: !complex

     test:
       suffix: 5
       nsize: 2
       args: -vecghostgetghostis

     test:
       suffix: 6
       nsize: 2
       args: -getgtlmapping

TEST*/