# Out-of-place Exscan: allocate a fresh result array of the same shape/eltype.
function Exscan(sendbuf::AbstractArray, op, comm::Comm)
    return Exscan!(sendbuf, similar(sendbuf), op, comm)
end

# Scalar Exscan: box the value in a Ref, scan, and unwrap the result.
function Exscan(object::T, op, comm::Comm) where {T}
    return Exscan!(Ref(object), Ref{T}(), op, comm)[]
end
"""
    Neighbor_alltoall!(sendbuf::UBuffer, recvbuf::UBuffer, graph_comm::Comm)

Perform an all-to-all communication along the directed edges of the graph with
fixed size messages. Returns `recvbuf.data`.

See also [`MPI.Alltoall!`](@ref).

# External links
$(_doc_external("MPI_Neighbor_alltoall"))
"""
function Neighbor_alltoall!(sendbuf::UBuffer, recvbuf::UBuffer, graph_comm::Comm)
    # int MPI_Neighbor_alltoall(const void *sendbuf, int sendcount, MPI_Datatype sendtype, void *recvbuf,
    #                           int recvcount, MPI_Datatype recvtype, MPI_Comm graph_comm)
    # NOTE(review): the trailing `v"3.0"` is presumably the minimum MPI standard
    # version accepted by `@mpichk` — confirm against the macro's definition.
    @mpichk ccall((:MPI_Neighbor_alltoall, libmpi), Cint,
                  (MPIPtr, Cint, MPI_Datatype, MPIPtr, Cint, MPI_Datatype, MPI_Comm),
                  sendbuf.data, sendbuf.count, sendbuf.datatype,
                  recvbuf.data, recvbuf.count, recvbuf.datatype,
                  graph_comm) v"3.0"
    # Hand back the raw receive array so callers get the exchanged data directly.
    return recvbuf.data
end
# Explicit MPI_IN_PLACE send: the data to exchange is taken from `recvbuf`.
Neighbor_alltoall!(sendbuf::InPlace, recvbuf::UBuffer, graph_comm::Comm) =
    Neighbor_alltoall!(UBuffer(IN_PLACE), recvbuf, graph_comm)
# In-place variant: `sendrecvbuf` is used both to send and to receive.
# Fix: forwarded `comm`, which is undefined in this scope — must be `graph_comm`.
Neighbor_alltoall!(sendrecvbuf::UBuffer, graph_comm::Comm) =
    Neighbor_alltoall!(IN_PLACE, sendrecvbuf, graph_comm)
# Allocating variant: exchange into a freshly allocated buffer and return it.
Neighbor_alltoall(sendbuf::UBuffer, graph_comm::Comm) =
    Neighbor_alltoall!(sendbuf, similar(sendbuf), graph_comm)
"""
    Neighbor_alltoallv!(sendbuf::VBuffer, recvbuf::VBuffer, graph_comm::Comm)

Perform an all-to-all communication along the directed edges of the graph with
variable size messages. Returns `recvbuf.data`.

See also [`MPI.Alltoallv!`](@ref).

# External links
$(_doc_external("MPI_Neighbor_alltoallv"))
"""
function Neighbor_alltoallv!(sendbuf::VBuffer, recvbuf::VBuffer, graph_comm::Comm)
    # int MPI_Neighbor_alltoallv(const void* sendbuf, const int sendcounts[],
    #                            const int sdispls[], MPI_Datatype sendtype, void* recvbuf,
    #                            const int recvcounts[], const int rdispls[],
    #                            MPI_Datatype recvtype, MPI_Comm comm)
    # NOTE(review): the trailing `v"3.0"` is presumably the minimum MPI standard
    # version accepted by `@mpichk` — confirm against the macro's definition.
    @mpichk ccall((:MPI_Neighbor_alltoallv, libmpi), Cint,
                  (MPIPtr, Ptr{Cint}, Ptr{Cint}, MPI_Datatype,
                   MPIPtr, Ptr{Cint}, Ptr{Cint}, MPI_Datatype,
                   MPI_Comm),
                  sendbuf.data, sendbuf.counts, sendbuf.displs, sendbuf.datatype,
                  recvbuf.data, recvbuf.counts, recvbuf.displs, recvbuf.datatype,
                  graph_comm) v"3.0"
    # Hand back the raw receive array so callers get the exchanged data directly.
    return recvbuf.data
end
"""
    Neighbor_allgather!(sendbuf::Buffer, recvbuf::UBuffer, graph_comm::Comm)

Perform an all-gather communication along the directed edges of the graph.
Returns `recvbuf.data`.

See also [`MPI.Allgather!`](@ref).

# External links
$(_doc_external("MPI_Neighbor_allgather"))
"""
function Neighbor_allgather!(sendbuf::Buffer, recvbuf::UBuffer, graph_comm::Comm)
    # int MPI_Neighbor_allgather(const void* sendbuf, int sendcount,
    #                            MPI_Datatype sendtype, void* recvbuf, int recvcount,
    #                            MPI_Datatype recvtype, MPI_Comm comm)
    # NOTE(review): the trailing `v"3.0"` is presumably the minimum MPI standard
    # version accepted by `@mpichk` — confirm against the macro's definition.
    @mpichk ccall((:MPI_Neighbor_allgather, libmpi), Cint,
                  (MPIPtr, Cint, MPI_Datatype, MPIPtr, Cint, MPI_Datatype, MPI_Comm),
                  sendbuf.data, sendbuf.count, sendbuf.datatype,
                  recvbuf.data, recvbuf.count, recvbuf.datatype, graph_comm) v"3.0"

    # Hand back the raw receive array so callers get the gathered data directly.
    return recvbuf.data
end
# Wrap an arbitrary send object into a Buffer before dispatching to the core method.
function Neighbor_allgather!(sendbuf, recvbuf::UBuffer, graph_comm::Comm)
    return Neighbor_allgather!(Buffer_send(sendbuf), recvbuf, graph_comm)
end

# Array receive buffer: chunk it into equal pieces of the send length.
function Neighbor_allgather!(sendbuf::Union{Ref,AbstractArray}, recvbuf::AbstractArray, graph_comm::Comm)
    return Neighbor_allgather!(sendbuf, UBuffer(recvbuf, length(sendbuf)), graph_comm)
end

# In-place variant: each process's own contribution is read from `sendrecvbuf`.
Neighbor_allgather!(sendrecvbuf::UBuffer, graph_comm::Comm) =
    Neighbor_allgather!(IN_PLACE, sendrecvbuf, graph_comm)
"""
    Neighbor_allgatherv!(sendbuf::Buffer, recvbuf::VBuffer, graph_comm::Comm)

Perform an all-gather communication along the directed edges of the graph with
variable sized data. Returns `recvbuf.data`.

See also [`MPI.Allgatherv!`](@ref).

# External links
$(_doc_external("MPI_Neighbor_allgatherv"))
"""
function Neighbor_allgatherv!(sendbuf::Buffer, recvbuf::VBuffer, graph_comm::Comm)
    # int MPI_Neighbor_allgatherv(const void *sendbuf, int sendcount, MPI_Datatype sendtype,
    #                             void *recvbuf, const int recvcounts[], const int displs[],
    #                             MPI_Datatype recvtype, MPI_Comm comm)
    # NOTE(review): the trailing `v"3.0"` is presumably the minimum MPI standard
    # version accepted by `@mpichk` — confirm against the macro's definition.
    @mpichk ccall((:MPI_Neighbor_allgatherv, libmpi), Cint,
                  (MPIPtr, Cint, MPI_Datatype, MPIPtr, Ptr{Cint}, Ptr{Cint}, MPI_Datatype, MPI_Comm),
                  sendbuf.data, sendbuf.count, sendbuf.datatype,
                  recvbuf.data, recvbuf.counts, recvbuf.displs, recvbuf.datatype, graph_comm) v"3.0"
    # Hand back the raw receive array so callers get the gathered data directly.
    return recvbuf.data
end
# Wrap an arbitrary send object into a Buffer before dispatching to the core method.
function Neighbor_allgatherv!(sendbuf, recvbuf::VBuffer, graph_comm::Comm)
    return Neighbor_allgatherv!(Buffer_send(sendbuf), recvbuf, graph_comm)
end

# Array receive buffer: wrap it in a VBuffer sized from the send length.
# NOTE(review): `VBuffer(recvbuf, length(sendbuf))` passes a scalar count — confirm
# a matching VBuffer constructor exists (cf. the UBuffer case in Neighbor_allgather!).
function Neighbor_allgatherv!(sendbuf::Union{Ref,AbstractArray}, recvbuf::AbstractArray, graph_comm::Comm)
    return Neighbor_allgatherv!(sendbuf, VBuffer(recvbuf, length(sendbuf)), graph_comm)
end

# In-place variant: send data is taken from `sendrecvbuf` itself.
Neighbor_allgatherv!(sendrecvbuf::VBuffer, graph_comm::Comm) =
    Neighbor_allgatherv!(IN_PLACE, sendrecvbuf, graph_comm)
0 commit comments