Skip to content

Commit 7a9a523

Browse files
committed
rename
add scalar test
1 parent 10d4a29 commit 7a9a523

File tree

4 files changed

+56
-3
lines changed

4 files changed

+56
-3
lines changed

src/interface.f90

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -210,7 +210,9 @@ end function get_native_dtype
210210

211211

212212
interface !< reader.f90
213-
213+
!! the read "value" arguments are intent(inout) because:
214+
!! * arrays: to work correctly when actual argument is allocatable
215+
!! * scalar: to work correctly with character type
214216
module subroutine h5read_scalar(self, dname, value)
215217
class(hdf5_file), intent(in) :: self
216218
character(*), intent(in) :: dname

src/tests/unit/CMakeLists.txt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ max_gcd(${MPI_MAX} ${MPIEXEC_MAX_NUMPROCS} Nmpi)
55

66
message(STATUS "Unit tests using ${Nmpi} processes")
77

8-
set(test_names deflate deflate_props deflate_read)
8+
set(test_names deflate_write deflate_props deflate_read scalar)
99

1010
foreach(t IN LISTS test_names)
1111

@@ -23,7 +23,7 @@ TIMEOUT 10
2323
PROCESSORS ${Nmpi}
2424
)
2525

26-
set_tests_properties(deflate PROPERTIES
26+
set_tests_properties(deflate_write PROPERTIES
2727
FIXTURES_SETUP deflate_files
2828
)
2929

src/tests/unit/test_scalar.f90

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
program test_scalar
!! Unit test: each MPI worker collectively writes and reads back a scalar
!! integer dataset via h5mpi.

use mpi, only : mpi_init, MPI_COMM_WORLD, mpi_comm_rank
use h5mpi, only : hdf5_file

implicit none (type, external)

!! mpi_finalize is declared external (not use-associated) as a workaround
!! for MPI module interface inconsistencies across implementations.
external :: mpi_finalize

integer :: ierr, mpi_id

call mpi_init(ierr)
if (ierr /= 0) error stop "mpi_init"

call mpi_comm_rank(MPI_COMM_WORLD, mpi_id, ierr)
if (ierr /= 0) error stop "mpi_comm_rank"

call test_scalar_collective(mpi_id)

call mpi_finalize(ierr)
if (ierr /= 0) error stop "mpi_finalize"

contains

subroutine test_scalar_collective(mpi_id)
!! This would be an unusual use case: writing the same scalar dataset from each worker.
!! It is a test of the collective write.
!! In general, the written dataset value is unpredictable in this write/read race condition.

integer, intent(in) :: mpi_id

type(hdf5_file) :: h5
integer :: i

call h5%open("scalar_coll.h5", action="w", mpi=.true.)

call h5%write("/mpi_id", mpi_id)

call h5%read("/mpi_id", i)

!! leading space in " read value: " keeps the two i0 fields from fusing,
!! e.g. "MPI worker: 3 read value: 3" instead of "MPI worker: 3read value: 3"
print '(a,i0,a,i0)', "MPI worker: ", mpi_id, " read value: ", i

call h5%close()

end subroutine test_scalar_collective

end program

0 commit comments

Comments
 (0)