Actual source code: ex1f.F90

!   Description: A star forest is a simple tree with one root and zero or more leaves.
!   Many common communication patterns can be expressed as updates of rootdata using leafdata and vice-versa.
!   This example creates a star forest, communicates values using the graph, views the graph, then destroys it.
!
!   This is a copy of ex1.c, but it currently tests only a subset of that example's operations (broadcast and reduce).

      program main
#include <petsc/finclude/petscvec.h>
      use petscmpi  ! or mpi or mpi_f08
      use petscvec
      implicit none

      PetscErrorCode                ierr
      PetscInt                      i,nroots,nrootsalloc,nleaves,nleavesalloc,mine(6),stride
      type(PetscSFNode)             remote(6)
      PetscMPIInt                   rank,size
      PetscSF                       sf
      PetscInt                      rootdata(6),leafdata(6)

! used with PetscSFGetGraph()
      type(PetscSFNode), pointer :: gremote(:)
      PetscInt, pointer ::          gmine(:)
      PetscInt                      gnroots,gnleaves

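! used with PetscSFGetLeafRanks() and PetscSFGetRootRanks()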
      PetscMPIInt                   niranks,nranks
      PetscMPIInt, pointer ::       iranks(:), ranks(:)
      PetscInt, pointer ::          ioffset(:),irootloc(:),roffset(:),rmine(:),rremote(:)

      PetscCallA(PetscInitialize(ierr))
      stride = 2
      PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
      PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))

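!     Number of roots and leaves on this process; rank 0 owns one extra root that every other rank references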
      if (rank == 0) then
         nroots = 3
      else
         nroots = 2
      endif
      nrootsalloc  = nroots * stride
      if (rank > 0) then
         nleaves = 3
      else
         nleaves = 2
      endif
      nleavesalloc = nleaves * stride
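!     When stride > 1, leaves occupy every stride-th entry of the local leaf buffer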
      if (stride > 1) then
         do i=1,nleaves
            mine(i) = stride * (i-1)
         enddo
      endif

! Left periodic neighbor
      remote(1)%rank  = modulo(rank+size-1,size)
      remote(1)%index = 1 * stride
! Right periodic neighbor
      remote(2)%rank  = modulo(rank+1,size)
      remote(2)%index = 0 * stride
      if (rank > 0) then ! All processes other than rank 0 reference rank 0, index 2*stride
         remote(3)%rank  = 0
         remote(3)%index = 2 * stride
      endif

!  Create a star forest for communication
      PetscCallA(PetscSFCreate(PETSC_COMM_WORLD,sf,ierr))
      PetscCallA(PetscSFSetFromOptions(sf,ierr))
      PetscCallA(PetscSFSetGraph(sf,nrootsalloc,nleaves,mine,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES,ierr))
      PetscCallA(PetscSFSetUp(sf,ierr))

!   View the graph, mostly useful for debugging purposes.
      PetscCallA(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL,ierr))
      PetscCallA(PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD,ierr))
      PetscCallA(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD,ierr))

!     Set up the send and receive buffers. This example communicates PetscInt, but other types, including
!     user-defined structures, could also be used.
!     Set the rootdata buffer to be broadcast
      do i=1,nrootsalloc
         rootdata(i) = -1
      enddo
      do i=1,nroots
         rootdata(1 + (i-1)*stride) = 100*(rank+1) + i - 1
      enddo

!     Initialize the local leaf buffer; these values are never used.
      do i=1,nleavesalloc
         leafdata(i) = -1
      enddo

!     Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls.
      PetscCallA(PetscSFBcastBegin(sf,MPIU_INTEGER,rootdata,leafdata,MPI_REPLACE,ierr))
      PetscCallA(PetscSFBcastEnd(sf,MPIU_INTEGER,rootdata,leafdata,MPI_REPLACE,ierr))
      PetscCallA(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,'## Bcast Rootdata\n',ierr))
      PetscCallA(PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD,ierr))
      PetscCallA(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,'## Bcast Leafdata\n',ierr))
      PetscCallA(PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD,ierr))

!     Reduce entries from leafdata to rootdata. Computation or other communication can be performed between the begin and end calls.
      PetscCallA(PetscSFReduceBegin(sf,MPIU_INTEGER,leafdata,rootdata,MPI_SUM,ierr))
      PetscCallA(PetscSFReduceEnd(sf,MPIU_INTEGER,leafdata,rootdata,MPI_SUM,ierr))
      PetscCallA(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,'## Reduce Leafdata\n',ierr))
      PetscCallA(PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD,ierr))
      PetscCallA(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,'## Reduce Rootdata\n',ierr))
      PetscCallA(PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD,ierr))

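!     Retrieve the graph with PetscSFGetGraph() and check that it matches what was set with PetscSFSetGraph()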
      PetscCallA(PetscSFGetGraph(sf,gnroots,gnleaves,gmine,gremote,ierr))
      PetscCheckA(gnleaves .eq. nleaves,PETSC_COMM_WORLD,PETSC_ERR_PLIB,'nleaves returned from PetscSFGetGraph() does not match that set with PetscSFSetGraph()')
      do i=1,nleaves
        PetscCheckA(gmine(i) .eq. mine(i),PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Leaf location from PetscSFGetGraph() does not match that set with PetscSFSetGraph()')
      enddo
      do i=1,nleaves
        PetscCheckA(gremote(i)%index .eq. remote(i)%index,PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Root from PetscSFGetGraph() does not match that set with PetscSFSetGraph()')
      enddo

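!     Release gremote, which was obtained from PetscSFGetGraph() above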
      deallocate(gremote)

! Test PetscSFGet{Leaf,Root}Ranks
      PetscCallA(PetscSFGetLeafRanks(sf,niranks,iranks,ioffset,irootloc,ierr))
      PetscCallA(PetscSFGetRootRanks(sf,nranks,ranks,roffset,rmine,rremote,ierr))
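!     (the returned rank and offset arrays are not used further in this example)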

!    Clean up storage for the star forest.
      PetscCallA(PetscSFDestroy(sf,ierr))

!  Create a star forest with contiguous leaves, and hence no leaf-location buffer (PETSC_NULL_INTEGER_ARRAY is passed)
      PetscCallA(PetscSFCreate(PETSC_COMM_WORLD,sf,ierr))
      PetscCallA(PetscSFSetFromOptions(sf,ierr))
      PetscCallA(PetscSFSetGraph(sf,nrootsalloc,nleaves,PETSC_NULL_INTEGER_ARRAY,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES,ierr))
      PetscCallA(PetscSFSetUp(sf,ierr))

!   View the graph, mostly useful for debugging purposes.
      PetscCallA(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL,ierr))
      PetscCallA(PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD,ierr))
      PetscCallA(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD,ierr))

      PetscCallA(PetscSFGetGraph(sf,gnroots,gnleaves,gmine,gremote,ierr))
      PetscCheckA(loc(gmine) .eq. loc(PETSC_NULL_INTEGER),PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Leaves from PetscSFGetGraph() not null as expected')
      deallocate(gremote)
      PetscCallA(PetscSFDestroy(sf,ierr))
      PetscCallA(PetscFinalize(ierr))
      end

!/*TEST
!  build:
!    requires: defined(PETSC_HAVE_FORTRAN_TYPE_STAR)
!
!  test:
!    nsize: 3
!
!TEST*/