Actual source code: ex14f90.F90
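! Demonstrates how to lay out data over a DMPLEX mesh with a PetscSection
! (a cell-centered scalar field u and a vertex-centered vector field v) and
! how to exchange ghost values between processes with a PetscSF.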
#include <petsc/finclude/petsc.h>
program ex14f90
  use petsc
  use mpi ! needed when PETSC_HAVE_MPI_F90MODULE is not true to define MPI_REPLACE
  implicit none

  type(tDM) :: dm
  type(tVec) :: u
  type(tPetscSection) :: section
  PetscInt :: dim, numFields, numBC
  PetscMPIInt :: rank
  PetscInt, dimension(2) :: numComp
  PetscInt, dimension(12) :: numDof
  PetscInt, dimension(:), pointer :: remoteOffsets
  type(tPetscSF) :: pointSF
  type(tPetscSF) :: sectionSF
  PetscScalar, dimension(:), pointer :: array
  PetscReal :: val
  PetscErrorCode :: ierr
  PetscInt :: zero = 0, one = 1

  PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER, ierr))
  PetscCallA(DMCreate(PETSC_COMM_WORLD, dm, ierr))
  PetscCallA(DMSetType(dm, DMPLEX, ierr))
  PetscCallA(DMSetFromOptions(dm, ierr))
  PetscCallA(DMViewFromOptions(dm, PETSC_NULL_OBJECT, "-dm_view", ierr))
  PetscCallA(DMGetDimension(dm, dim, ierr))

  !!! Describe the solution variables that are discretized on the mesh
  ! Create scalar field u and a vector field v
  numFields = 2
  numComp = [one, dim]
  numDof = 0
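  ! numDof has numFields*(dim + 1) entries: the dof count for field f
  ! (numbered from 0) on mesh points of depth d lives at Fortran index
  ! f*(dim + 1) + d + 1, where depth 0 is vertices and depth dim is cells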
  ! Let u be defined on cells
  numDof(0*(dim + 1) + dim + 1) = 1
  ! Let v be defined on vertices
  numDof(1*(dim + 1) + 1) = dim
  ! No boundary conditions
  numBC = 0

  !!! Create a PetscSection to handle the layout of the discretized variables
  PetscCallA(DMSetNumFields(dm, numFields, ierr))
  PetscCallA(DMPlexCreateSection(dm, PETSC_NULL_DMLABEL_ARRAY, numComp, numDof, numBC, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_IS_ARRAY, PETSC_NULL_IS_ARRAY, PETSC_NULL_IS, section, ierr))
  ! Name the field variables
  PetscCallA(PetscSectionSetFieldName(section, zero, "u", ierr))
  PetscCallA(PetscSectionSetFieldName(section, one, "v", ierr))
  ! Tell the DM to use this data layout
  PetscCallA(DMSetLocalSection(dm, section, ierr))
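  ! With the local section attached, the DM can size local vectors from this
  ! layout (see DMCreateLocalVector() below)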

  !!! Construct the communication pattern for halo exchange between local vectors
  ! Get the point SF: an object that says which local mesh points (cells,
  ! vertices, faces, edges) are copies of points owned by other processes
  PetscCallA(DMGetPointSF(dm, pointSF, ierr))
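  ! In PetscSF terms, the owned points are "roots" and the local ghost copies
  ! are "leaves" that reference roots on (possibly) other processes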
  ! Relate the locations of ghost degrees of freedom on this process
  ! to the locations of their non-ghost copies on other processes
  PetscCallA(PetscSFCreateRemoteOffsets(pointSF, section, section, remoteOffsets, ierr))
  ! Use that information to construct a star forest for halo exchange
  ! for data described by the local section
  PetscCallA(PetscSFCreateSectionSF(pointSF, section, remoteOffsets, section, sectionSF, ierr))
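  ! remoteOffsets may come back unassociated (for instance when the point SF
  ! has no ghost points), which is why it is only destroyed if associated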
  if (associated(remoteOffsets)) then
    PetscCallA(PetscSFDestroyRemoteOffsets(remoteOffsets, ierr))
  end if

  !!! Demo of halo exchange
  ! Create a Vec with this layout
  PetscCallA(DMCreateLocalVector(dm, u, ierr))
  PetscCallA(PetscObjectSetName(u, "Local vector", ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
  ! Set all mesh values to the MPI rank
  val = rank
  PetscCallA(VecSet(u, val, ierr))
  ! Get the raw array of values
  PetscCallA(VecGetArrayWrite(u, array, ierr))
  !!! HALO EXCHANGE
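  ! PetscSFBcastBegin/End push values from the owned (root) dofs to the ghost
  ! (leaf) dofs described by sectionSF; here the same array holds both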
  PetscCallA(PetscSFBcastBegin(sectionSF, MPIU_SCALAR, array, array, MPI_REPLACE, ierr))
  ! local work can be done between Begin() and End()
  PetscCallA(PetscSFBcastEnd(sectionSF, MPIU_SCALAR, array, array, MPI_REPLACE, ierr))
  ! Restore the raw array of values
  PetscCallA(VecRestoreArrayWrite(u, array, ierr))
  ! View the results: should show which process has the non-ghost copy of each degree of freedom
  PetscCallA(PetscSectionVecView(section, u, PETSC_VIEWER_STDOUT_WORLD, ierr))
  PetscCallA(VecDestroy(u, ierr))

  PetscCallA(PetscSFDestroy(sectionSF, ierr))
  PetscCallA(PetscSectionDestroy(section, ierr))
  PetscCallA(DMDestroy(dm, ierr))
  PetscCallA(PetscFinalize(ierr))
end program ex14f90
!/*TEST
!  # Test on a 1D mesh with overlap
!  test:
!    nsize: 3
!    requires: !complex
!    args: -dm_plex_dim 1 -dm_plex_box_faces 3 -dm_refine_pre 1 -petscpartitioner_type simple -dm_distribute_overlap 1
!
!TEST*/