!
! Description: Demonstrates using a local ordering to set values into a parallel vector
!
#include <petsc/finclude/petscvec.h>
program main
  use petscvec

  implicit none

  PetscErrorCode ierr
  PetscMPIInt rank
  PetscInt :: i, ng, rstart, rend, M
  PetscInt, pointer, dimension(:) :: gindices
  Vec :: x
  ISLocalToGlobalMapping :: ltog
  PetscCallA(PetscInitialize(ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))

  !
  ! Create a parallel vector.
  !  - In this case, we specify the size of each processor's local
  !    portion, and PETSc computes the global size. Alternatively,
  !    PETSc could determine the vector's distribution if we specified
  !    just the global size.
  !
  PetscCallA(VecCreate(PETSC_COMM_WORLD, x, ierr))
  PetscCallA(VecSetSizes(x, rank + 1_PETSC_INT_KIND, PETSC_DECIDE, ierr))
  PetscCallA(VecSetFromOptions(x, ierr))
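
  ! For example, with the 4 ranks used by the test below, the local sizes
  ! are 1, 2, 3, and 4, so PETSc computes the global size M = 1+2+3+4 = 10.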

#if defined(PETSC_USE_COMPLEX)
  ! A Fortran complex literal (re, im) automatically takes the complex KIND
  ! corresponding to the KIND of its constant arguments
  PetscCallA(VecSet(x, (1.0_PETSC_REAL_KIND, 0.0_PETSC_REAL_KIND), ierr))
  ! Alternatively, one can set the KIND explicitly using cmplx()
  PetscCallA(VecSet(x, cmplx(1.0_PETSC_REAL_KIND, 0.0_PETSC_REAL_KIND, PETSC_REAL_KIND), ierr))
#else
  PetscCallA(VecSet(x, 1.0_PETSC_REAL_KIND, ierr))
#endif
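
  ! A KIND-agnostic sketch of the same step: declare
  !   PetscScalar :: one
  ! set one = 1.0, and call
  !   PetscCallA(VecSet(x, one, ierr))
  ! since PetscScalar resolves to the real or complex scalar type selected
  ! when PETSc was configured.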

  !
  ! Set the local-to-global ordering for the vector. Each processor
  ! generates a list of the global indices for each local index. Note that
  ! the local indices are just whatever is convenient for a particular application.
  ! In this case we treat the vector as lying on a one-dimensional grid and
  ! have one ghost point on each end of the block owned by each processor.
  !
  PetscCallA(VecGetSize(x, M, ierr))
  PetscCallA(VecGetOwnershipRange(x, rstart, rend, ierr))
  ng = rend - rstart + 2
  allocate (gindices(0:ng - 1))
  gindices(0) = rstart - 1

  do i = 0, ng - 2
    gindices(i + 1) = gindices(i) + 1
  end do

  ! map the first and last points as periodic
  if (gindices(0) == -1) gindices(0) = M - 1
  if (gindices(ng - 1) == M) gindices(ng - 1) = 0
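
  ! For example, on 4 ranks, rank 0 owns only global index 0 (rstart = 0,
  ! rend = 1), so ng = 3 and gindices starts as [-1, 0, 1]; the periodic
  ! wrap above turns it into [M-1, 0, 1] = [9, 0, 1].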

  PetscCallA(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1_PETSC_INT_KIND, ng, gindices, PETSC_COPY_VALUES, ltog, ierr))
  PetscCallA(VecSetLocalToGlobalMapping(x, ltog, ierr))
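
  ! The mapping was created with block size 1, and the vector keeps its own
  ! reference to it; since PETSC_COPY_VALUES copied the indices, it is safe
  ! to destroy ltog and free gindices immediately.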
  PetscCallA(ISLocalToGlobalMappingDestroy(ltog, ierr))
  deallocate (gindices)

  ! Set the vector elements.
  !  - In this case set the values using the local ordering
  !  - Each processor can contribute any vector entries,
  !    regardless of which processor "owns" them; any nonlocal
  !    contributions will be transferred to the appropriate processor
  !    during the assembly process.
  !  - In this example, the flag ADD_VALUES indicates that all
  !    contributions will be added together.

  do i = 0, ng - 1
#if defined(PETSC_USE_COMPLEX)
    PetscCallA(VecSetValuesLocal(x, 1_PETSC_INT_KIND, [i], [(1.0_PETSC_REAL_KIND, 0.0_PETSC_REAL_KIND)], ADD_VALUES, ierr))
#else
    PetscCallA(VecSetValuesLocal(x, 1_PETSC_INT_KIND, [i], [1.0_PETSC_REAL_KIND], ADD_VALUES, ierr))
#endif
  end do
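
  ! After assembly, each global entry equals 1.0 (from VecSet) plus one
  ! contribution per rank whose ghosted index list covers it: 2.0 for the
  ! interior points of a block, 3.0 at block boundaries, and more where a
  ! block is small enough to be covered by several neighbors' lists.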

  !
  ! Assemble the vector, using the 2-step process:
  !   VecAssemblyBegin(), VecAssemblyEnd()
  ! Computations can be done while messages are in transit
  ! by placing code between these two statements.
  !
  PetscCallA(VecAssemblyBegin(x, ierr))
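  ! (independent local computations could be placed here to overlap with communication)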
  PetscCallA(VecAssemblyEnd(x, ierr))

  !
  ! View the vector; then destroy it.
  !
  PetscCallA(VecView(x, PETSC_VIEWER_STDOUT_WORLD, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(PetscFinalize(ierr))

end program
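
! A typical run, assuming the executable built from this file is named ex8f:
!   mpiexec -n 4 ./ex8f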

!/*TEST
!
!   test:
!     nsize: 4
!     output_file: output/ex8_1.out
!
!TEST*/