Actual source code: ex9.c
static char help[] = "Demonstrates use of VecCreateGhost().\n\n";

/*
  Description: Ghost padding is one way to handle local calculations that
  involve values from other processors. VecCreateGhost() provides
  a way to create vectors with extra room at the end of the vector
  array to contain the needed ghost values from other processors;
  vector computations are otherwise unaffected.
*/
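
/*
  A sketch of the resulting storage in the local (ghosted) form on each
  rank: the owned entries come first and the ghost slots follow at the
  end of the same array.

     array[0 .. nlocal-1]             - locally owned entries
     array[nlocal .. nlocal+nghost-1] - ghost values from other ranks
*/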

/*
  Include "petscvec.h" so that we can use vectors. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscMPIInt rank, size;
  PetscInt    nlocal = 6, nghost = 2, ifrom[2], i, rstart, rend;
  PetscBool   flg, flg2, flg3;
  PetscScalar value, *array, *tarray = NULL;
  Vec         lx, gx, gxs;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Must run example with two processors");

  /*
     Construct a graph connecting the nlocal degrees of freedom on each
     processor. From this we will generate the global indices of the
     needed ghost values.

     For simplicity we generate the entire graph on each processor: in a
     real application the graph would be stored in parallel, but this
     example is only to demonstrate the management of ghost padding
     with VecCreateGhost().

     In this example we consider the vector as representing degrees of
     freedom in a one dimensional grid with periodic boundary
     conditions.

        ----- Rank 0 -----        ----- Rank 1 -----
         0  1  2  3  4  5          6  7  8  9 10 11
                        |----------|
         |----------------------------------------|
  */
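
  /*
     Each rank lists the global indices of its ghost values: rank 0
     (owning entries 0 through 5) needs entries 11 and 6, while rank 1
     (owning entries 6 through 11) needs entries 0 and 5, following the
     periodic connections drawn above.
  */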
  if (rank == 0) {
    ifrom[0] = 11;
    ifrom[1] = 6;
  } else {
    ifrom[0] = 0;
    ifrom[1] = 5;
  }

  /*
     Create the vector with two slots for ghost points. Note that both
     the local vector (lx) and the global vector (gx) share the same
     array for storing vector values.
  */
  PetscCall(PetscOptionsHasName(NULL, NULL, "-allocate", &flg));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-vecmpisetghost", &flg2));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-minvalues", &flg3));
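
  /*
     Three equivalent ways to build the ghosted vector:
       -allocate       : supply our own storage via VecCreateGhostWithArray()
       -vecmpisetghost : create a VECMPI, then attach ghosts with VecMPISetGhost()
       (default)       : let VecCreateGhost() allocate everything
  */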
  if (flg) {
    PetscCall(PetscMalloc1(nlocal + nghost, &tarray));
    PetscCall(VecCreateGhostWithArray(PETSC_COMM_WORLD, nlocal, PETSC_DECIDE, nghost, ifrom, tarray, &gxs));
  } else if (flg2) {
    PetscCall(VecCreate(PETSC_COMM_WORLD, &gxs));
    PetscCall(VecSetType(gxs, VECMPI));
    PetscCall(VecSetSizes(gxs, nlocal, PETSC_DECIDE));
    PetscCall(VecMPISetGhost(gxs, nghost, ifrom));
  } else {
    PetscCall(VecCreateGhost(PETSC_COMM_WORLD, nlocal, PETSC_DECIDE, nghost, ifrom, &gxs));
  }

  /*
     Test VecDuplicate(); the duplicate inherits the ghost layout of gxs,
     so the original can be destroyed immediately.
  */
  PetscCall(VecDuplicate(gxs, &gx));
  PetscCall(VecDestroy(&gxs));

  /*
     Access the local representation
  */
  PetscCall(VecGhostGetLocalForm(gx, &lx));

  /*
     Set the values 0 through 11 into the "global" vector
  */
  PetscCall(VecGetOwnershipRange(gx, &rstart, &rend));
  for (i = rstart; i < rend; i++) {
    value = (PetscScalar)i;
    PetscCall(VecSetValues(gx, 1, &i, &value, INSERT_VALUES));
  }
  PetscCall(VecAssemblyBegin(gx));
  PetscCall(VecAssemblyEnd(gx));

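  /*
     Forward scatter with INSERT_VALUES: each owned value is copied into
     the ghost slot of whichever rank requested it.
  */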
  PetscCall(VecGhostUpdateBegin(gx, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(gx, INSERT_VALUES, SCATTER_FORWARD));

  /*
     Print out each vector, including the ghost padding region.
  */
  PetscCall(VecGetArray(lx, &array));
  for (i = 0; i < nlocal + nghost; i++) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " %g\n", i, (double)PetscRealPart(array[i])));
  PetscCall(VecRestoreArray(lx, &array));
  PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
  PetscCall(VecGhostRestoreLocalForm(gx, &lx));

  /* Another test that sets ghost values and then accumulates onto the owning processors using MIN_VALUES */
  if (flg3) {
    if (rank == 0) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\nTesting VecGhostUpdate with MIN_VALUES\n"));
    PetscCall(VecGhostGetLocalForm(gx, &lx));
    PetscCall(VecGetArray(lx, &array));
    for (i = 0; i < nghost; i++) array[nlocal + i] = rank ? (PetscScalar)4 : (PetscScalar)8;
    PetscCall(VecRestoreArray(lx, &array));
    PetscCall(VecGhostRestoreLocalForm(gx, &lx));
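
    /*
       Reverse scatter with MIN_VALUES: each ghost value is sent back to
       the rank that owns the entry, which keeps the minimum of its
       current value and the incoming one.
    */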
    PetscCall(VecGhostUpdateBegin(gx, MIN_VALUES, SCATTER_REVERSE));
    PetscCall(VecGhostUpdateEnd(gx, MIN_VALUES, SCATTER_REVERSE));

    PetscCall(VecGhostGetLocalForm(gx, &lx));
    PetscCall(VecGetArray(lx, &array));

    for (i = 0; i < nlocal + nghost; i++) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " %g\n", i, (double)PetscRealPart(array[i])));
    PetscCall(VecRestoreArray(lx, &array));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
    PetscCall(VecGhostRestoreLocalForm(gx, &lx));
  }

  PetscCall(VecDestroy(&gx));

  if (flg) PetscCall(PetscFree(tarray));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 2

   test:
      suffix: 2
      nsize: 2
      args: -allocate
      output_file: output/ex9_1.out

   test:
      suffix: 3
      nsize: 2
      args: -vecmpisetghost
      output_file: output/ex9_1.out

   test:
      suffix: 4
      nsize: 2
      args: -minvalues
      output_file: output/ex9_2.out
      requires: !complex

TEST*/