Actual source code: ex14f90.F90
program ex14f90
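  ! Demonstrates halo exchange on a distributed DMPlex mesh: a PetscSection
  ! describes the layout of the discretized variables, and a PetscSF built from
  ! the mesh point SF moves owned values into the ghost entries of a local vector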

#include <petsc/finclude/petsc.h>
  use petsc
  use mpi ! needed to define MPI_REPLACE when PETSC_HAVE_MPI_F90MODULE is not true
  implicit none

  type(tDM) :: dm
  type(tVec) :: u
  type(tPetscSection) :: section
  PetscInt :: dim, numFields, numBC
  PetscMPIInt :: rank
  PetscInt, dimension(2) :: numComp
  PetscInt, dimension(12) :: numDof
  PetscInt, dimension(:), pointer :: remoteOffsets
  type(tPetscSF) :: pointSF
  type(tPetscSF) :: sectionSF
  PetscScalar, dimension(:), pointer :: array
  PetscScalar :: val ! PetscScalar so the VecSet() interface matches in complex builds too
  PetscErrorCode :: ierr
  PetscInt :: zero = 0, one = 1

  PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER, ierr))
  PetscCallA(DMCreate(PETSC_COMM_WORLD, dm, ierr))
  PetscCallA(DMSetType(dm, DMPLEX, ierr))
  PetscCallA(DMSetFromOptions(dm, ierr))
  PetscCallA(DMViewFromOptions(dm, PETSC_NULL_OBJECT, "-dm_view", ierr))
  PetscCallA(DMGetDimension(dm, dim, ierr))

  !!! Describe the solution variables that are discretized on the mesh
  ! Create a scalar field u and a vector field v
  numFields = 2
  numComp = [one, dim]
  numDof = 0
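  ! numDof is indexed as numDof(f * (dim + 1) + d + 1): the number of dofs for
  ! field f on mesh points of depth d (d = 0 for vertices, d = dim for cells);
  ! the trailing + 1 accounts for Fortran's 1-based indexing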
  ! Let u be defined on cells
  numDof(0 * (dim + 1) + dim + 1) = 1
  ! Let v be defined on vertices
  numDof(1 * (dim + 1) + 1) = dim
  ! No boundary conditions
  numBC = 0
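  ! Since numBC = 0, the boundary-condition arguments of DMPlexCreateSection()
  ! below (bcField, bcComps, bcPoints) are passed as PETSC_NULL arrays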

  !!! Create a PetscSection to handle the layout of the discretized variables
  PetscCallA(DMSetNumFields(dm, numFields, ierr))
  PetscCallA(DMPlexCreateSection(dm, PETSC_NULL_DMLABEL_ARRAY, numComp, numDof, numBC, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_IS_ARRAY, PETSC_NULL_IS_ARRAY, PETSC_NULL_IS, section, ierr))
  ! Name the field variables
  PetscCallA(PetscSectionSetFieldName(section, zero, "u", ierr))
  PetscCallA(PetscSectionSetFieldName(section, one, "v", ierr))
  ! Tell the DM to use this data layout
  PetscCallA(DMSetLocalSection(dm, section, ierr))
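  ! A local section lays out both owned and ghost degrees of freedom,
  ! which is exactly what local vectors store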

  !!! Construct the communication pattern for halo exchange between local vectors
  ! Get the point SF: an object that says which mesh points (cells, vertices,
  ! faces, edges) on this process are copies of points owned by other processes
  PetscCallA(DMGetPointSF(dm, pointSF, ierr))
  ! Relate the locations of ghost degrees of freedom on this process
  ! to the locations of their non-ghost copies on other processes
  PetscCallA(PetscSFCreateRemoteOffsets(pointSF, section, section, remoteOffsets, ierr))
  ! Use that information to construct a star forest for halo exchange
  ! of data described by the local section
  PetscCallA(PetscSFCreateSectionSF(pointSF, section, remoteOffsets, section, sectionSF, ierr))
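  ! In sectionSF the roots are owned degrees of freedom and the leaves are ghost
  ! degrees of freedom, so a broadcast pushes owned values onto their ghost copies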
  if (associated(remoteOffsets)) then
    PetscCallA(PetscSFDestroyRemoteOffsets(remoteOffsets, ierr))
  end if
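  ! PetscSFCreateRemoteOffsets() allocated remoteOffsets, so it is freed here; the
  ! associated() guard above handles processes for which no offsets were created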

  !!! Demo of halo exchange
  ! Create a Vec with this layout
  PetscCallA(DMCreateLocalVector(dm, u, ierr))
  PetscCallA(PetscObjectSetName(u, "Local vector", ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
  ! Set all mesh values to the MPI rank
  val = rank
  PetscCallA(VecSet(u, val, ierr))

  ! Get read/write access to the raw array of values; the broadcast below
  ! reads owned entries and overwrites ghost entries of the same array
  PetscCallA(VecGetArray(u, array, ierr))
  !!! HALO EXCHANGE
  PetscCallA(PetscSFBcastBegin(sectionSF, MPIU_SCALAR, array, array, MPI_REPLACE, ierr))
  ! local work can be done between Begin() and End()
  PetscCallA(PetscSFBcastEnd(sectionSF, MPIU_SCALAR, array, array, MPI_REPLACE, ierr))
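  ! With MPI_REPLACE each leaf (ghost) value is overwritten by its root (owned)
  ! value, so every ghost entry now holds the rank of its owning process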
  ! Restore the raw array of values
  PetscCallA(VecRestoreArray(u, array, ierr))
  ! View the results: should show which process has the non-ghost copy of each degree of freedom
  PetscCallA(PetscSectionVecView(section, u, PETSC_VIEWER_STDOUT_WORLD, ierr))
  PetscCallA(VecDestroy(u, ierr))

  PetscCallA(PetscSFDestroy(sectionSF, ierr))
  PetscCallA(PetscSectionDestroy(section, ierr))
  PetscCallA(DMDestroy(dm, ierr))
  PetscCallA(PetscFinalize(ierr))
end program ex14f90

!/*TEST
!  build:
!    requires: defined(PETSC_USING_F90FREEFORM)
!
!  # Test on a 1D mesh with overlap
!  test:
!    nsize: 3
!    requires: !complex
!    args: -dm_plex_dim 1 -dm_plex_box_faces 3 -dm_refine_pre 1 -petscpartitioner_type simple -dm_distribute_overlap 1
!
!TEST*/