Actual source code: ex1f.F90


!  Description: A star forest is a simple tree with one root and zero or more leaves.
!  Many common communication patterns can be expressed as updates of rootdata using leafdata and vice versa.
!  This example creates a star forest, communicates values using the graph, views the graph, then destroys it.
!
!  This is based on ex1.c, but it currently tests only a subset of that example's operations.

      program main
#include <petsc/finclude/petscvec.h>
      use petscmpi  ! or mpi or mpi_f08
      use petscvec
      implicit none

      PetscErrorCode                ierr
      PetscInt                      i,nroots,nrootsalloc,nleaves,nleavesalloc,mine(6),stride
      type(PetscSFNode)             remote(6)
      PetscMPIInt                   rank,size
      PetscSF                       sf
      PetscInt                      rootdata(6),leafdata(6)

! used with PetscSFGetGraph()
      type(PetscSFNode), pointer :: gremote(:)
      PetscInt, pointer ::          gmine(:)
      PetscInt                      gnroots,gnleaves

      PetscInt                      niranks,nranks
      PetscMPIInt, pointer ::       iranks(:), ranks(:)
      PetscInt, pointer ::          ioffset(:),irootloc(:),roffset(:),rmine(:),rremote(:)

      PetscCallA(PetscInitialize(ierr))
      stride = 2
      PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
      PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))

      if (rank == 0) then
         nroots = 3
      else
         nroots = 2
      endif
      nrootsalloc = nroots * stride
      if (rank > 0) then
         nleaves = 3
      else
         nleaves = 2
      endif
      nleavesalloc = nleaves * stride
      if (stride > 1) then
         do i=1,nleaves
            mine(i) = stride * (i-1)
         enddo
      endif

! Left periodic neighbor
      remote(1)%rank  = modulo(rank+size-1,size)
      remote(1)%index = 1 * stride
! Right periodic neighbor
      remote(2)%rank  = modulo(rank+1,size)
      remote(2)%index = 0 * stride
      if (rank > 0) then ! Every rank except rank 0 also references rank 0, index 2*stride
         remote(3)%rank  = 0
         remote(3)%index = 2 * stride
      endif
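
!  For illustration, the (0-based) leaf-to-root connections set up above are:
!     leaf offset 0        <- root offset stride   on the left periodic neighbor
!     leaf offset stride   <- root offset 0        on the right periodic neighbor
!     leaf offset 2*stride <- root offset 2*stride on rank 0 (ranks > 0 only)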

!  Create a star forest for communication
      PetscCallA(PetscSFCreate(PETSC_COMM_WORLD,sf,ierr))
      PetscCallA(PetscSFSetFromOptions(sf,ierr))
      PetscCallA(PetscSFSetGraph(sf,nrootsalloc,nleaves,mine,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES,ierr))
      PetscCallA(PetscSFSetUp(sf,ierr))

!   View graph, mostly useful for debugging purposes.
      PetscCallA(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL,ierr))
      PetscCallA(PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD,ierr))
      PetscCallA(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD,ierr))

!   Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
!   user-defined structures, could also be used.
!   Set the rootdata buffer to be broadcast
      do i=1,nrootsalloc
         rootdata(i) = -1
      enddo
      do i=1,nroots
         rootdata(1 + (i-1)*stride) = 100*(rank+1) + i - 1
      enddo
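
!  For example, with stride 2 the loops above produce
!     rootdata = (/ 100, -1, 101, -1, 102, -1 /)  on rank 0
!     rootdata = (/ 200, -1, 201, -1 /)           on rank 1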

!     Initialize the local buffer; these values are never used.
      do i=1,nleavesalloc
         leafdata(i) = -1
      enddo

!     Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls.
      PetscCallA(PetscSFBcastBegin(sf,MPIU_INTEGER,rootdata,leafdata,MPI_REPLACE,ierr))
      PetscCallA(PetscSFBcastEnd(sf,MPIU_INTEGER,rootdata,leafdata,MPI_REPLACE,ierr))
      PetscCallA(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Rootdata\n",ierr))
      PetscCallA(PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD,ierr))
      PetscCallA(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Leafdata\n",ierr))
      PetscCallA(PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD,ierr))
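
!  With MPI_REPLACE each leaf simply receives the value of the root it references.
!  For example, on rank 1 of 3 (stride 2) the broadcast should leave
!     leafdata = (/ 101, -1, 300, -1, 102, -1 /)
!  i.e. root offset 2 of rank 0, root offset 0 of rank 2, and root offset 4 of rank 0.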

!     Reduce entries from leafdata to rootdata. Computation or other communication can be performed between the begin and end calls.
      PetscCallA(PetscSFReduceBegin(sf,MPIU_INTEGER,leafdata,rootdata,MPI_SUM,ierr))
      PetscCallA(PetscSFReduceEnd(sf,MPIU_INTEGER,leafdata,rootdata,MPI_SUM,ierr))
      PetscCallA(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Leafdata\n",ierr))
      PetscCallA(PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD,ierr))
      PetscCallA(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Rootdata\n",ierr))
      PetscCallA(PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD,ierr))
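
!  The reduction moves data in the opposite direction, from leaves to roots.
!  With MPI_SUM each root combines its current value with the value of every
!  leaf that references it; the root at offset 2*stride on rank 0 is referenced
!  by all other ranks, so it accumulates size-1 leaf contributions.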

      PetscCallA(PetscSFGetGraph(sf,gnroots,gnleaves,gmine,gremote,ierr))
      if (gnleaves .ne. nleaves) then; SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,'nleaves returned from PetscSFGetGraph() does not match that set with PetscSFSetGraph()'); endif
      do i=1,nleaves
         if (gmine(i) .ne. mine(i)) then; SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Root from PetscSFGetGraph() does not match that set with PetscSFSetGraph()'); endif
      enddo
      do i=1,nleaves
         if (gremote(i)%index .ne. remote(i)%index) then; SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Leaf from PetscSFGetGraph() does not match that set with PetscSFSetGraph()'); endif
      enddo

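!  The Fortran interface to PetscSFGetGraph() allocates the array of
!  PetscSFNode entries it returns, so the caller deallocates it here.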
      deallocate(gremote)

! Test PetscSFGet{Leaf,Root}Ranks
      PetscCallA(PetscSFGetLeafRanks(sf,niranks,iranks,ioffset,irootloc,ierr))
      PetscCallA(PetscSFGetRootRanks(sf,nranks,ranks,roffset,rmine,rremote,ierr))
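
!  Roughly speaking, PetscSFGetLeafRanks() describes the incoming side of the
!  graph (which ranks have leaves referencing roots on this process, and which
!  root locations they reference), while PetscSFGetRootRanks() describes the
!  outgoing side (which root ranks this process's leaves reference). Their
!  results are only queried here, not checked.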

!    Clean up storage for the star forest.
      PetscCallA(PetscSFDestroy(sf,ierr))

!  Create a star forest with contiguous leaves and hence no buffer: passing
!  PETSC_NULL_INTEGER for ilocal means leaf i is stored at offset i of leafdata.
      PetscCallA(PetscSFCreate(PETSC_COMM_WORLD,sf,ierr))
      PetscCallA(PetscSFSetFromOptions(sf,ierr))
      PetscCallA(PetscSFSetGraph(sf,nrootsalloc,nleaves,PETSC_NULL_INTEGER,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES,ierr))
      PetscCallA(PetscSFSetUp(sf,ierr))

!   View graph, mostly useful for debugging purposes.
      PetscCallA(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL,ierr))
      PetscCallA(PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD,ierr))
      PetscCallA(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD,ierr))

      PetscCallA(PetscSFGetGraph(sf,gnroots,gnleaves,gmine,gremote,ierr))
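!  When no ilocal array was set, PetscSFGetGraph() hands back the
!  PETSC_NULL_INTEGER sentinel for it; compare addresses with loc() to verify.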
      if (loc(gmine) .ne. loc(PETSC_NULL_INTEGER)) then; SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Leaves from PetscSFGetGraph() not null as expected'); endif
      deallocate(gremote)
      PetscCallA(PetscSFDestroy(sf,ierr))
      PetscCallA(PetscFinalize(ierr))
      end

!/*TEST
!  build:
!    requires: defined(PETSC_HAVE_FORTRAN_TYPE_STAR)
!
!  test:
!    nsize: 3
!
!TEST*/