
add test program for MPI_Waitall #90

Closed · wants to merge 2 commits
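
For context: MPI_Waitall blocks until every request in the supplied array has completed, and per the MPI standard, MPI_REQUEST_NULL entries are treated as already complete. That is what lets the test below initialize its whole requests array to MPI_REQUEST_NULL and fill in only the slot each rank actually uses. A minimal standalone sketch of that semantics, assuming a full MPI installation (names follow the standard Fortran mpi module):

! Standalone sketch (not from the PR): an all-null request array completes
! immediately, since MPI_Waitall treats MPI_REQUEST_NULL entries as done.
program waitall_null_demo
    use mpi
    implicit none
    integer :: ierr
    integer :: requests(2)

    call MPI_INIT(ierr)
    requests = MPI_REQUEST_NULL
    ! No pending operations: this returns without blocking.
    call MPI_WAITALL(2, requests, MPI_STATUSES_IGNORE, ierr)
    call MPI_FINALIZE(ierr)
end program waitall_null_demo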
35 changes: 32 additions & 3 deletions src/mpi.f90
@@ -8,6 +8,7 @@ module mpi
     integer, parameter :: MPI_REAL8 = -10014
 
     integer, parameter :: MPI_COMM_TYPE_SHARED = 1
+    integer, parameter :: MPI_REQUEST_NULL = -17
     integer, parameter :: MPI_PROC_NULL = -1
     integer, parameter :: MPI_SUCCESS = 0

@@ -53,8 +54,9 @@ module mpi
         module procedure MPI_Isend_3d
     end interface
 
-    interface MPI_IRecv
-        module procedure MPI_IRecv_proc
+    interface MPI_Irecv
+        module procedure MPI_Irecv_2d
+        module procedure MPI_Irecv_3d
     end interface
 
     interface MPI_Allreduce
@@ -370,7 +372,7 @@ subroutine MPI_Isend_3d(buf, count, datatype, dest, tag, comm, request, ierror)
         request = c_mpi_request_c2f(c_request)
     end subroutine
 
-    subroutine MPI_Irecv_proc(buf, count, datatype, source, tag, comm, request, ierror)
+    subroutine MPI_Irecv_2d(buf, count, datatype, source, tag, comm, request, ierror)
         use iso_c_binding, only: c_int, c_ptr
         use mpi_c_bindings, only: c_mpi_irecv, c_mpi_comm_f2c, c_mpi_datatype_f2c, c_mpi_request_c2f
         real(8), dimension(:,:) :: buf
@@ -398,6 +400,33 @@ subroutine MPI_Irecv_proc(buf, count, datatype, source, tag, comm, request, ierror)
         end if
     end subroutine
 
+    subroutine MPI_Irecv_3d(buf, count, datatype, source, tag, comm, request, ierror)
+        use iso_c_binding, only: c_int, c_ptr
+        use mpi_c_bindings, only: c_mpi_irecv, c_mpi_comm_f2c, c_mpi_datatype_f2c, c_mpi_request_c2f
+        real(8), dimension(:,:,:) :: buf
+        integer, intent(in) :: count, source, tag
+        integer, intent(in) :: datatype
+        integer, intent(in) :: comm
+        integer, intent(out) :: request
+        integer, optional, intent(out) :: ierror
+        type(c_ptr) :: c_comm
+        integer(c_int) :: local_ierr
+        type(c_ptr) :: c_datatype
+        type(c_ptr) :: c_request
+
+        c_comm = c_mpi_comm_f2c(comm)
+        c_datatype = c_mpi_datatype_f2c(datatype)
+        local_ierr = c_mpi_irecv(buf, count, c_datatype, source, tag, c_comm, c_request)
+        request = c_mpi_request_c2f(c_request)
+
+        if (present(ierror)) then
+            ierror = local_ierr
+        else
+            if (local_ierr /= MPI_SUCCESS) then
+                print *, "MPI_Irecv failed with error code: ", local_ierr
+            end if
+        end if
+    end subroutine
     subroutine MPI_Allreduce_scalar(sendbuf, recvbuf, count, datatype, op, comm, ierror)
         use mpi_c_bindings, only: c_mpi_allreduce_scalar
         real(8), intent(in) :: sendbuf
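
The mpi.f90 changes above turn MPI_Irecv into a Fortran generic interface: the compiler selects MPI_Irecv_2d or MPI_Irecv_3d from the rank of the buffer actual argument at compile time. A self-contained sketch of the same dispatch mechanism (illustrative names, not part of the library):

! Demo of rank-based generic resolution, the pattern used by the
! MPI_Irecv interface block in the diff above.
module demo_generic
    implicit none
    interface recv_demo
        module procedure recv_demo_2d
        module procedure recv_demo_3d
    end interface
contains
    subroutine recv_demo_2d(buf)
        real(8), dimension(:,:), intent(in) :: buf
        print *, "rank-2 buffer,", size(buf), "elements"
    end subroutine
    subroutine recv_demo_3d(buf)
        real(8), dimension(:,:,:), intent(in) :: buf
        print *, "rank-3 buffer,", size(buf), "elements"
    end subroutine
end module

program demo
    use demo_generic
    implicit none
    real(8) :: a(3,4), b(3,4,2)
    call recv_demo(a)   ! resolves to recv_demo_2d
    call recv_demo(b)   ! resolves to recv_demo_3d
end program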
70 changes: 70 additions & 0 deletions tests/waitall_3d_1.f90
@@ -0,0 +1,70 @@
!> TODO: this program needs if (...) error stop conditions in it (see the sketch after this listing)
program waitall_3d_1
    use mpi
    implicit none

    integer :: ierr, rank, nprocs
    integer :: num_requests, i
    integer, parameter :: dim1 = 3, dim2 = 4, dim3 = 2
    real(8) :: send_buffer_3d(dim1, dim2, dim3)
    real(8) :: recv_buffer_3d(dim1, dim2, dim3)
    integer, allocatable :: requests(:)

    ! Initialize MPI
    call MPI_INIT(ierr)
    call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
    call MPI_COMM_SIZE(MPI_COMM_WORLD, nprocs, ierr)

    ! Must have at least 2 processes
    if (nprocs < 2) then
        if (rank == 0) then
            print *, "This program requires at least 2 processes"
        endif
        call MPI_FINALIZE(ierr)
        stop
    endif

    ! Allocate request array
    num_requests = 2
    allocate(requests(num_requests))
    requests = MPI_REQUEST_NULL

    ! Initialize send buffer (3D array) on rank 0
    if (rank == 0) then
        send_buffer_3d = reshape([(real(rank * 10 + i, 8), i=1,dim1*dim2*dim3)], &
                                 [dim1, dim2, dim3])
    endif

    ! Clear receive buffer (3D array)
    recv_buffer_3d = 0.0_8

    ! Non-blocking send and receive
    if (rank == 0) then
        ! Send the full 3D array to rank 1
        call MPI_ISEND(send_buffer_3d, dim1*dim2*dim3, MPI_DOUBLE_PRECISION, 1, 0, &
                       MPI_COMM_WORLD, requests(1), ierr)
    else if (rank == 1) then
        ! Receive into a 3D array from rank 0
        call MPI_IRECV(recv_buffer_3d, dim1*dim2*dim3, MPI_DOUBLE_PRECISION, 0, 0, &
                       MPI_COMM_WORLD, requests(2), ierr)
    endif

    ! Wait for all communications to complete
    if (rank == 0 .or. rank == 1) then
        call MPI_WAITALL(num_requests, requests, MPI_STATUSES_IGNORE, ierr)
    endif

    ! Print results
    if (rank == 1) then
        print '(A,I2)', "Process ", rank
        do i = 1, dim3
            print '(A,I1)', "Received 3D array, slice ", i
            print '(4F8.1)', recv_buffer_3d(:,:,i)
        enddo
    endif

    ! Clean up
    deallocate(requests)
    call MPI_FINALIZE(ierr)

end program waitall_3d_1
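
One way to discharge the TODO at the top of the test: with rank 0 as the sender, the send buffer holds real(0*10 + i, 8), i.e. 1.0 through 24.0 in array element order, so rank 1 can rebuild the expected array and error stop on any mismatch. A sketch meant to sit after the MPI_WAITALL call, reusing the program's own declarations:

! Hypothetical check for rank 1 (addresses the TODO above); assumes the
! declarations and buffers of waitall_3d_1. With rank 0 as sender,
! rank*10 + i reduces to i, so the expected values are 1.0 .. 24.0.
if (rank == 1) then
    block
        real(8) :: expected(dim1, dim2, dim3)
        expected = reshape([(real(i, 8), i = 1, dim1*dim2*dim3)], &
                           [dim1, dim2, dim3])
        if (any(abs(recv_buffer_3d - expected) > 1.0e-12_8)) then
            error stop "waitall_3d_1: received data does not match expected values"
        end if
    end block
end if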