# test_wait.jl
# tests for the various kinds of waits
using Base.Test
using MPI

MPI.Init()
myrank = MPI.Comm_rank(MPI.COMM_WORLD)
commsize = MPI.Comm_size(MPI.COMM_WORLD)

nsends = 2
send_arr = Array(Array{Int, 1}, nsends)
recv_arr = Array(Array{Int, 1}, nsends)

for i=1:nsends
    send_arr[i] = [i]
    recv_arr[i] = Array(Int, 1)
end

send_reqs = Array(MPI.Request, nsends)
recv_reqs = Array(MPI.Request, nsends)

# send to self
for i=1:nsends
    send_reqs[i] = MPI.Isend(send_arr[i], myrank, i, MPI.COMM_WORLD)
    recv_reqs[i] = MPI.Irecv!(recv_arr[i], myrank, i, MPI.COMM_WORLD)
end

send_check = zeros(Int, nsends)
recv_check = zeros(Int, nsends)
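# Waitany! blocks until one of the pending requests completes and returns the
# index of the completed request; count each returned index so the checks
# below can verify that every request completed exactly once.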
for i=1:nsends
    idx, stat = MPI.Waitany!(send_reqs)
    send_check[idx] += 1
    send_reqs[idx] = MPI.REQUEST_NULL
end

for i=1:nsends
    @test send_check[i] == 1
end

for i=1:nsends
    idx, stat = MPI.Waitany!(recv_reqs)
    recv_check[idx] += 1
    recv_reqs[idx] = MPI.REQUEST_NULL
end

for i=1:nsends
    @test recv_check[i] == 1
end
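
# A minimal sketch of the same send-to-self round completed with a single
# collective wait on all requests, assuming MPI.Waitall! blocks until every
# request in the array has completed.
waitall_send_reqs = Array(MPI.Request, nsends)
waitall_recv_reqs = Array(MPI.Request, nsends)
for i=1:nsends
    waitall_send_reqs[i] = MPI.Isend(send_arr[i], myrank, i, MPI.COMM_WORLD)
    waitall_recv_reqs[i] = MPI.Irecv!(recv_arr[i], myrank, i, MPI.COMM_WORLD)
end
MPI.Waitall!(waitall_send_reqs)
MPI.Waitall!(waitall_recv_reqs)

# after Waitall! returns, each receive buffer should hold the value sent to it
for i=1:nsends
    @test recv_arr[i][1] == i
end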

MPI.Barrier(MPI.COMM_WORLD)
MPI.Finalize()