From 21e811611db5bd59d762a720cdb83590a1311a77 Mon Sep 17 00:00:00 2001
From: Gaurav Dhingra
Date: Wed, 2 Apr 2025 17:47:43 +0530
Subject: [PATCH 1/2] add test program for MPI_Waitall

---
 src/mpi.f90            | 34 ++++++++++++++++++--
 tests/waitall_3d_1.f90 | 70 ++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 101 insertions(+), 3 deletions(-)
 create mode 100644 tests/waitall_3d_1.f90

diff --git a/src/mpi.f90 b/src/mpi.f90
index 95a2c85..84e8e18 100644
--- a/src/mpi.f90
+++ b/src/mpi.f90
@@ -53,8 +53,9 @@ module mpi
         module procedure MPI_Isend_3d
     end interface
 
-    interface MPI_IRecv
-        module procedure MPI_IRecv_proc
+    interface MPI_Irecv
+        module procedure MPI_Irecv_2d
+        module procedure MPI_Irecv_3d
     end interface
 
     interface MPI_Allreduce
@@ -370,7 +371,7 @@ subroutine MPI_Isend_3d(buf, count, datatype, dest, tag, comm, request, ierror)
         request = c_mpi_request_c2f(c_request)
     end subroutine
 
-    subroutine MPI_Irecv_proc(buf, count, datatype, source, tag, comm, request, ierror)
+    subroutine MPI_Irecv_2d(buf, count, datatype, source, tag, comm, request, ierror)
         use iso_c_binding, only: c_int, c_ptr
         use mpi_c_bindings, only: c_mpi_irecv, c_mpi_comm_f2c, c_mpi_datatype_f2c, c_mpi_request_c2f
         real(8), dimension(:,:) :: buf
@@ -398,6 +399,33 @@ subroutine MPI_Irecv_proc(buf, count, datatype, source, tag, comm, request, ierr
         end if
     end subroutine
 
+    subroutine MPI_Irecv_3d(buf, count, datatype, source, tag, comm, request, ierror)
+        use iso_c_binding, only: c_int, c_ptr
+        use mpi_c_bindings, only: c_mpi_irecv, c_mpi_comm_f2c, c_mpi_datatype_f2c, c_mpi_request_c2f
+        real(8), dimension(:,:,:) :: buf
+        integer, intent(in) :: count, source, tag
+        integer, intent(in) :: datatype
+        integer, intent(in) :: comm
+        integer, intent(out) :: request
+        integer, optional, intent(out) :: ierror
+        type(c_ptr) :: c_comm
+        integer(c_int) :: local_ierr
+        type(c_ptr) :: c_datatype
+        type(c_ptr) :: c_request
+
+        c_comm = c_mpi_comm_f2c(comm)
+        c_datatype = c_mpi_datatype_f2c(datatype)
+        local_ierr = c_mpi_irecv(buf, count, c_datatype, source, tag, c_comm, c_request)
+        request = c_mpi_request_c2f(c_request)
+
+        if (present(ierror)) then
+            ierror = local_ierr
+        else
+            if (local_ierr /= MPI_SUCCESS) then
+                print *, "MPI_Irecv failed with error code: ", local_ierr
+            end if
+        end if
+    end subroutine
     subroutine MPI_Allreduce_scalar(sendbuf, recvbuf, count, datatype, op, comm, ierror)
         use mpi_c_bindings, only: c_mpi_allreduce_scalar
         real(8), intent(in) :: sendbuf
diff --git a/tests/waitall_3d_1.f90 b/tests/waitall_3d_1.f90
new file mode 100644
index 0000000..0b7f5a1
--- /dev/null
+++ b/tests/waitall_3d_1.f90
@@ -0,0 +1,70 @@
+!> TODO: this program needs if (...) error stop conditions in it
+program waitall_3d_1
+    use mpi
+    implicit none
+
+    integer :: ierr, rank, nprocs
+    integer :: num_requests, i
+    integer, parameter :: dim1 = 3, dim2 = 4, dim3 = 2
+    real(8) :: send_buffer_3d(dim1, dim2, dim3)
+    real(8) :: recv_buffer_3d(dim1, dim2, dim3)
+    integer, allocatable :: requests(:)
+
+    ! Initialize MPI
+    call MPI_INIT(ierr)
+    call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
+    call MPI_COMM_SIZE(MPI_COMM_WORLD, nprocs, ierr)
+
+    ! Must have at least 2 processes
+    if (nprocs < 2) then
+        if (rank == 0) then
+            print *, "This program requires at least 2 processes"
+        endif
+        call MPI_FINALIZE(ierr)
+        stop
+    endif
+
+    ! Allocate request array
+    num_requests = 2
+    allocate(requests(num_requests))
+    ! requests = MPI_REQUEST_NULL
+
+    ! Initialize send buffer (3D array) on rank 0
+    if (rank == 0) then
+        send_buffer_3d = reshape([(real(rank * 10 + i, 8), i=1,dim1*dim2*dim3)], &
+                                 [dim1, dim2, dim3])
+    endif
+
+    ! Clear receive buffer (3D array)
+    recv_buffer_3d = 0.0_8
+
+    ! Non-blocking send and receive
+    if (rank == 0) then
+        ! Send the full 3D array to rank 1
+        call MPI_ISEND(send_buffer_3d, dim1*dim2*dim3, MPI_DOUBLE_PRECISION, 1, 0, &
+                       MPI_COMM_WORLD, requests(1), ierr)
+    else if (rank == 1) then
+        ! Receive into a 3D array from rank 0
+        call MPI_IRECV(recv_buffer_3d, dim1*dim2*dim3, MPI_DOUBLE_PRECISION, 0, 0, &
+                       MPI_COMM_WORLD, requests(2), ierr)
+    endif
+
+    ! Wait for all communications to complete
+    if (rank == 0 .or. rank == 1) then
+        call MPI_WAITALL(num_requests, requests, MPI_STATUSES_IGNORE, ierr)
+    endif
+
+    ! Print results
+    if (rank == 1) then
+        print '(A,I2)', "Process ", rank
+        do i = 1, dim3
+            print '(A,I1)', "Received 3D array, slice ", i
+            print '(4F8.1)', recv_buffer_3d(:,:,i)
+        enddo
+    endif
+
+    ! Clean up
+    deallocate(requests)
+    call MPI_FINALIZE(ierr)
+
+end program waitall_3d_1

From 79172e9bedc0eca1df5b3f1b5713ba264475a97a Mon Sep 17 00:00:00 2001
From: Gaurav Dhingra
Date: Wed, 2 Apr 2025 19:45:40 +0530
Subject: [PATCH 2/2] define MPI_REQUEST_NULL

---
 src/mpi.f90            | 1 +
 tests/waitall_3d_1.f90 | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/mpi.f90 b/src/mpi.f90
index 84e8e18..713956f 100644
--- a/src/mpi.f90
+++ b/src/mpi.f90
@@ -8,6 +8,7 @@ module mpi
     integer, parameter :: MPI_REAL8 = -10014
     integer, parameter :: MPI_COMM_TYPE_SHARED = 1
 
+    integer, parameter :: MPI_REQUEST_NULL = -17
     integer, parameter :: MPI_PROC_NULL = -1
     integer, parameter :: MPI_SUCCESS = 0
 
diff --git a/tests/waitall_3d_1.f90 b/tests/waitall_3d_1.f90
index 0b7f5a1..a36a91b 100644
--- a/tests/waitall_3d_1.f90
+++ b/tests/waitall_3d_1.f90
@@ -27,7 +27,7 @@ program waitall_3d_1
     ! Allocate request array
     num_requests = 2
    allocate(requests(num_requests))
-    ! requests = MPI_REQUEST_NULL
+    requests = MPI_REQUEST_NULL
 
     ! Initialize send buffer (3D array) on rank 0
     if (rank == 0) then
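
Note on the TODO at the top of tests/waitall_3d_1.f90: a minimal sketch of the missing "if (...) error stop" check that rank 1 could run after the MPI_WAITALL call. It reuses the test's own variables (recv_buffer_3d, dim1, dim2, dim3, rank); the loop indices i1, i2, i3 and the variable expected are illustrative new locals, not part of the patches, and would need declarations next to the existing ones.

    ! Sketch: verify the received data on rank 1 (assumes the declarations
    !   integer :: i1, i2, i3  and  real(8) :: expected  are added above).
    ! Rank 0 fills the send buffer column-major with rank*10 + k, which for
    ! rank 0 is just k = 1..dim1*dim2*dim3, so element (i1, i2, i3) should
    ! hold (i3-1)*dim1*dim2 + (i2-1)*dim1 + i1.
    if (rank == 1) then
        do i3 = 1, dim3
            do i2 = 1, dim2
                do i1 = 1, dim1
                    expected = real((i3 - 1)*dim1*dim2 + (i2 - 1)*dim1 + i1, 8)
                    if (abs(recv_buffer_3d(i1, i2, i3) - expected) > 1.0e-12_8) then
                        error stop "waitall_3d_1: received value does not match"
                    end if
                end do
            end do
        end do
    end if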