
Commit 0a643dc

gasagna authored and simonbyrne committed
add sendrecv wrapper (#317)
* add `sendrecv` wrapper
* added docs
* added other Sendrecv methods
1 parent f8d9cde commit 0a643dc

File tree

2 files changed: +118 -1 lines changed


src/pointtopoint.jl

Lines changed: 59 additions & 0 deletions
@@ -341,6 +341,65 @@ function irecv(src::Integer, tag::Integer, comm::Comm)
    (true, MPI.deserialize(buf), stat)
end

"""
    Sendrecv(sendbuf, sendcount::Integer, sendtype::Union{Datatype, MPI_Datatype}, dest::Integer, sendtag::Integer,
             recvbuf, recvcount::Integer, recvtype::Union{Datatype, MPI_Datatype}, source::Integer, recvtag::Integer,
             comm::Comm)

Complete a blocking send-receive operation over the MPI communicator `comm`. Send
`sendcount` elements of type `sendtype` from `sendbuf` to MPI rank `dest` using message
tag `sendtag`, and receive `recvcount` elements of type `recvtype` from MPI rank `source`
into the buffer `recvbuf` using message tag `recvtag`. Return an `MPI.Status` object.
"""
function Sendrecv(sendbuf, sendcount::Integer, sendtype::Union{Datatype, MPI_Datatype}, dest::Integer, sendtag::Integer,
                  recvbuf, recvcount::Integer, recvtype::Union{Datatype, MPI_Datatype}, source::Integer, recvtag::Integer,
                  comm::Comm)
    # int MPI_Sendrecv(const void *sendbuf, int sendcount, MPI_Datatype sendtype, int dest, int sendtag,
    #                  void *recvbuf, int recvcount, MPI_Datatype recvtype, int source, int recvtag,
    #                  MPI_Comm comm, MPI_Status *status)
    stat_ref = Ref{Status}()
    @mpichk ccall((:MPI_Sendrecv, libmpi), Cint,
                  (MPIPtr, Cint, MPI_Datatype, Cint, Cint,
                   MPIPtr, Cint, MPI_Datatype, Cint, Cint,
                   MPI_Comm, Ptr{Status}),
                  sendbuf, sendcount, sendtype, dest, sendtag,
                  recvbuf, recvcount, recvtype, source, recvtag, comm, stat_ref)
    return stat_ref[]
end

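A minimal usage sketch of this low-level method: a periodic ring exchange where every rank sends three values to its right neighbour and receives three from its left neighbour. It assumes an initialized MPI environment and that the internal `MPI.mpitype` helper (the same datatype lookup used by the count-based method below) is reachable from the `MPI` module.

using MPI
MPI.Init()

comm   = MPI.COMM_WORLD
rank   = MPI.Comm_rank(comm)
nranks = MPI.Comm_size(comm)

# send to the right neighbour, receive from the left one (periodic ring)
dest = mod(rank + 1, nranks)
src  = mod(rank - 1, nranks)

sendbuf = fill(Float64(rank), 3)
recvbuf = zeros(Float64, 3)

# exchange 3 Float64 elements in each direction, message tag 0 both ways
stat = MPI.Sendrecv(sendbuf, 3, MPI.mpitype(Float64), dest, 0,
                    recvbuf, 3, MPI.mpitype(Float64), src,  0, comm)
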
"""
371+
Sendrecv(sendbuf::MPIBuffertype{T}, sendcount::Integer, dest::Integer, sendtag::Integer,
372+
recvbuf::MPIBuffertype{T}, recvcount::Integer, source::Integer, recvtag::Integer,
373+
comm::Comm) where {T}
374+
375+
Complete a blocking send-receive operation over the MPI communicator `comm`, sending
376+
`sendcount` elements of type `T` from `sendbuf` to the MPI rank `dest` using message
377+
tag `tag`, and receiving `recvcount` elements of the same type from MPI rank `source` into
378+
the buffer `recvbuf` using message tag `tag`. Return an MPI.Status object.
379+
"""
380+
function Sendrecv(sendbuf::MPIBuffertype{T}, sendcount::Integer, dest::Integer, sendtag::Integer,
381+
recvbuf::MPIBuffertype{T}, recvcount::Integer, source::Integer, recvtag::Integer,
382+
comm::Comm) where {T}
383+
return Sendrecv(sendbuf, sendcount, mpitype(eltype(sendbuf)), dest, sendtag,
384+
recvbuf, recvcount, mpitype(eltype(recvbuf)), source, recvtag, comm)
385+
end
386+
387+
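Continuing the same sketch, the count-based convenience method infers the MPI datatype from the buffer element type; here only part of each buffer is exchanged. It reuses `comm`, `rank`, `dest`, and `src` from the sketch above.

a = Float64[rank, rank, rank, rank, rank]
b = fill(-1.0, 5)

# send the first 2 elements of `a`; only the first 2 slots of `b` are overwritten
MPI.Sendrecv(a, 2, dest, 1,
             b, 2, src,  1, comm)
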
"""
388+
Sendrecv(sendbuf::AbstractArray{T}, dest::Integer, sendtag::Integer,
389+
recvbuf::AbstractArray{T}, source::Integer, recvtag::Integer,
390+
comm::Comm) where {T}
391+
392+
Complete a blocking send-receive operation over the MPI communicator `comm`, sending
393+
`sendbuf` to the MPI rank `dest` using message tag `tag`, and receiving the buffer
394+
`recvbuf` using message tag `tag`. Return an MPI.Status object.
395+
"""
396+
function Sendrecv(sendbuf::AbstractArray{T}, dest::Integer, sendtag::Integer,
397+
recvbuf::AbstractArray{T}, source::Integer, recvtag::Integer,
398+
comm::Comm) where {T}
399+
return Sendrecv(sendbuf, length(sendbuf), dest, sendtag,
400+
recvbuf, length(recvbuf), source, recvtag, comm)
401+
end
402+
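And, still within the same sketch, the whole-array method simply forwards `length(sendbuf)` and `length(recvbuf)` to the count-based method, so entire arrays are exchanged.

send = Float64[rank, rank, rank]
recv = fill(-1.0, 3)

# exchange the entire arrays; the return value is the MPI.Status of the operation
stat = MPI.Sendrecv(send, dest, 0,
                    recv, src,  0, comm)

MPI.Finalize()
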
"""
    Wait!(req::Request)

test/test_sendrecv.jl

Lines changed: 59 additions & 1 deletion
@@ -79,5 +79,63 @@ MPI.Cancel!(sreq)

GC.gc()

# ---------------------
# MPI_Sendrecv function
# ---------------------
#
# send datatype
# ---------------------
# We test this function by shifting the leftmost element of each process one rank to the
# left in a 1D cartesian topology with periodic boundary conditions.
#
# Assume we have two processes; the data will look like this:
#     proc 0  |  proc 1
#     0 0 0   |  1 1 1
#
# After the shift the data will contain
#     proc 0  |  proc 1
#     0 0 1   |  1 1 0
#
# init data
comm_rank = MPI.Comm_rank(comm)
comm_size = MPI.Comm_size(comm)
a = Float64[comm_rank, comm_rank, comm_rank]

# construct subarray types selecting a single element of the 3-element 1D array:
# the arguments are (ndims, sizes, subsizes, starts, order, eltype), so `subarr_send`
# picks the first element (offset 0) and `subarr_recv` the last one (offset 2)
subarr_send = MPI.Type_Create_Subarray(1, Cint[3], Cint[1], Cint[0], MPI.MPI_ORDER_FORTRAN, Float64)
subarr_recv = MPI.Type_Create_Subarray(1, Cint[3], Cint[1], Cint[2], MPI.MPI_ORDER_FORTRAN, Float64)
MPI.Type_Commit!(subarr_send)
MPI.Type_Commit!(subarr_recv)

# construct cartesian communicator with 1D periodic topology
comm_cart = MPI.Cart_create(comm, 1, Cint[comm_size], Cint[1], false)

# get source and dest ranks using Cart_shift
src_rank, dest_rank = MPI.Cart_shift(comm_cart, 0, -1)

# execute the left shift using the subarray datatypes
MPI.Sendrecv(a, 1, subarr_send, dest_rank, 0,
             a, 1, subarr_recv, src_rank, 0, comm_cart)

@test a == [comm_rank, comm_rank, (comm_rank+1) % comm_size]

# send elements from a buffer
# ---------------------------
a = Float64[comm_rank, comm_rank, comm_rank]
b = Float64[-1, -1, -1]
MPI.Sendrecv(a, 2, dest_rank, 1,
             b, 2, src_rank, 1, comm_cart)

@test b == [(comm_rank+1) % comm_size, (comm_rank+1) % comm_size, -1]

# send entire buffer
# ---------------------------
a = Float64[comm_rank, comm_rank, comm_rank]
b = Float64[-1, -1, -1]
MPI.Sendrecv(a, dest_rank, 2,
             b, src_rank, 2, comm_cart)

@test b == [(comm_rank+1) % comm_size, (comm_rank+1) % comm_size, (comm_rank+1) % comm_size]

 MPI.Finalize()
-@test MPI.Finalized()
+# @test MPI.Finalized()
