add mpigatherinplace example for reproducing pmpi wrapper bug

main
Bryce Allen 5 years ago
parent 7a1d10349e
commit 2434b39b53

@ -1,5 +1,5 @@
.PHONY: all
all: daxpy mpi_daxpy mpienv daxpy_nvtx mpi_daxpy_nvtx_managed mpi_daxpy_nvtx_unmanaged mpigatherinplace
CCFLAGS = -std=c++11
CUDA_HOME ?= $(CUDA_DIR)
@ -22,6 +22,9 @@ mpi_daxpy_nvtx_unmanaged: mpi_daxpy_nvtx.cc cuda_error.h
mpienv: mpienv.f90
	mpif90 -o mpienv mpienv.f90
mpigatherinplace: mpigatherinplace.f90
	mpifort -o mpigatherinplace mpigatherinplace.f90
.PHONY: clean
clean:
	rm -rf daxpy mpi_daxpy daxpy_nvtx mpi_daxpy_nvtx_managed mpi_daxpy_nvtx_unmanaged

@ -0,0 +1,58 @@
!> Example reproducing a PMPI-wrapper bug with MPI_Allgather + MPI_IN_PLACE:
!> each rank fills its own slice of a shared-size buffer, all ranks gather
!> every slice in place, and each rank prints its local sum next to the
!> global sum so mismatches introduced by a broken wrapper are visible.
program mpigatherinplace
  use mpi
  use, intrinsic :: iso_fortran_env, only: int64
  implicit none

  ! Kind for double precision, matching MPI_DOUBLE_PRECISION below.
  integer, parameter :: dp = kind(1.0d0)
  ! Elements contributed per rank (2**27); named to avoid magic numbers.
  integer, parameter :: n_local = 128*1024*1024

  integer :: rank, ierr, nmpi, i
  ! 64-bit offsets: rank*n_local and n_local*nmpi overflow default
  ! (32-bit) integers once rank/nmpi reaches 16 with n_local = 2**27.
  integer(int64) :: offset
  real(dp), dimension(:), allocatable :: allx
  ! Sums carry the same kind as allx; default real would lose precision.
  real(dp) :: asum, lsum

  call MPI_Init(ierr)
  if (ierr /= 0) then
    print *, 'Failed MPI_Init: ', ierr
    stop 1   ! nonzero exit so the failure is visible to the launcher
  end if
  call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
  if (ierr /= 0) then
    print *, 'Failed MPI_COMM_RANK: ', ierr
    stop 1
  end if
  call MPI_COMM_SIZE(MPI_COMM_WORLD, nmpi, ierr)
  if (ierr /= 0) then
    print *, 'Failed MPI_COMM_SIZE: ', ierr
    stop 1
  end if

  allocate(allx(int(n_local, int64) * nmpi))

  ! Fill this rank's slice and accumulate the local sum for comparison.
  lsum = 0.0_dp
  offset = int(rank, int64) * n_local
  do i = 1, n_local
    ! Real arithmetic: the integer form rank*i/n_local both truncates
    ! to whole numbers and overflows 32-bit ints for rank >= 16.
    allx(offset + i) = real(rank, dp) * i / n_local
    lsum = lsum + allx(offset + i)
  end do

  ! MPI_IN_PLACE on the send side: each rank's slice is already in allx.
  ! MPI_DOUBLE_PRECISION is the Fortran datatype for real(dp); the C
  ! name MPI_DOUBLE is not guaranteed to exist in the mpi module.
  call MPI_Allgather(MPI_IN_PLACE, 0, MPI_DOUBLE_PRECISION, &
      & allx, n_local, MPI_DOUBLE_PRECISION, MPI_COMM_WORLD, ierr)
  if (ierr /= 0) then
    print *, 'Failed MPI_Allgather: ', ierr
    stop 1
  end if

  asum = sum(allx)
  print *, rank, "/", nmpi, " ", lsum, " ", asum

  deallocate(allx)
  call MPI_Finalize(ierr)
  if (ierr /= 0) then
    print *, 'Failed MPI_Finalize: ', ierr
    stop 1
  end if
end program mpigatherinplace
Loading…
Cancel
Save