
Commit a27c829

meta
1 parent 30294c1 commit a27c829

5 files changed: +289 −2 lines changed

API.md

Lines changed: 206 additions & 0 deletions
@@ -0,0 +1,206 @@
# h5fortran-MPI API

This document lists the h5fortran-mpi `public` scoped, user-facing procedures and methods, with a summary of their parameters.

All examples assume:

```fortran
use h5mpi, only: hdf5_file, HSIZE_T, HID_T

type(hdf5_file) :: h
```

Query the HDF5 library version:

```fortran
use h5mpi, only : hdf5version

print *, hdf5version()
```

## Open / close HDF5 file reference

More than one HDF5 file can be open in a program by declaring a unique file handle (variable) for each, like:

```fortran
type(hdf5_file) :: h1, h2, h3
```

```fortran
call h%open(filename, action, mpi, comp_lvl)
!! Opens HDF5 file

character(*), intent(in) :: filename
character(*), intent(in), optional :: action !< 'r', 'w', 'rw', 'r+'
logical, intent(in) :: mpi !< .true.: use HDF5-MPI  .false.: use serial HDF5
integer, intent(in), optional :: comp_lvl !< 0: no compression. 1-9: ZLIB compression; higher is more compression
```

```fortran
call h%close(close_hdf5_interface)
!! close() must be called on each open HDF5 file to flush buffers to disk;
!! data loss can occur if the program terminates before this procedure.
!!
!! Set close_hdf5_interface=.true. only when you know you have exactly one HDF5 file open in your
!! application: it closes ALL open HDF5 files, even those opened directly through the HDF5 library.

logical, intent(in), optional :: close_hdf5_interface
```

To avoid memory leaks or corrupted files, always close() all HDF5 files before the Fortran program stops.

```fortran
call h%flush()
!! request the operating system to flush data to disk.
```

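For example, a minimal open / close round trip might look like the following sketch; the file name is made up and error handling is omitted:

```fortran
program open_close_demo
!! sketch only (not from the upstream examples): open an HDF5 file with MPI, then close it
use mpi
use h5mpi, only : hdf5_file

implicit none

type(hdf5_file) :: h
integer :: ierr

call mpi_init(ierr)

call h%open('example.h5', action='w', mpi=.true.)
!! ... h%write / h%read calls go here ...
call h%close()

call mpi_finalize(ierr)

end program
```
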
## Disk variable (dataset) inquiry

To allocate variables before reading data, inquire about dataset characteristics with these procedures.

```fortran
rank = h%ndim(dataset_name)

character(*), intent(in) :: dataset_name
```

Get disk dataset shape (1D vector):

```fortran
call h%shape(dataset_name, dims)
character(*), intent(in) :: dataset_name
integer(HSIZE_T), intent(out), allocatable :: dims(:)
```

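For example, a sketch of the shape-then-allocate pattern (the file name, dataset name, and rank of 2 are assumptions for illustration):

```fortran
!! sketch only: query the shape of a 2-D dataset, allocate, then read it
use h5mpi, only : hdf5_file, HSIZE_T

type(hdf5_file) :: h
integer(HSIZE_T), allocatable :: dims(:)
real, allocatable :: A(:,:)

call h%open('example.h5', action='r', mpi=.false.)

call h%shape('/data', dims)
allocate(A(dims(1), dims(2)))
call h%read('/data', A)

call h%close()
```
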
Dataset "dname" data class (e.g. integer, float, string, ...):

```fortran
integer :: class
!! H5T_INTEGER_F, H5T_FLOAT_F, H5T_STRING_F
class = h%class(dname)
character(*), intent(in) :: dname
```

Dataset "dname" datatype:

```fortran
integer(HID_T) :: dtype
!! H5T_NATIVE_REAL, H5T_NATIVE_DOUBLE, H5T_NATIVE_INTEGER, H5T_NATIVE_CHARACTER, H5T_STD_I64LE
dtype = h%dtype(dname)
character(*), intent(in) :: dname
```

Does dataset "dname" exist in this HDF5 file?

```fortran
exists = h%exist(dname)
character(*), intent(in) :: dname
```

Is dataset "dname" contiguous on disk?

```fortran
tf = h%is_contig(dname)
!! is dataset contiguous
character(*), intent(in) :: dname
```

Is dataset "dname" compact (under 64 kB)?

```fortran
tf = h%is_compact(dname)
!! is dataset compact layout
character(*), intent(in) :: dname
```

Is dataset "dname" chunked?

```fortran
tf = h%is_chunked(dname)
!! is dataset chunked
character(*), intent(in) :: dname
```

Is this an HDF5 file?

```fortran
use h5mpi, only: is_hdf5

tf = is_hdf5('myfile.txt') !< probably false
tf = is_hdf5('myfile.h5') !< true if a valid HDF5 file
```

These are more advanced inquiries into the storage layout of a dataset:

```fortran
layout = h%layout(dname)
!! integer :: H5D_CONTIGUOUS_F, H5D_CHUNKED_F, H5D_VIRTUAL_F, H5D_COMPACT_F
character(*), intent(in) :: dname
```

```fortran
call h%chunks(dname, chunk_size)
character(*), intent(in) :: dname
integer, intent(out) :: chunk_size(:)
```

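A sketch of how these might be used together; the dataset name is hypothetical, and `H5D_CHUNKED_F` is assumed to come from the HDF5 Fortran library module:

```fortran
!! sketch only: check the storage layout, and query the chunk size when chunked
use hdf5, only : H5D_CHUNKED_F

integer :: lay, csize(2)

lay = h%layout('/data')
if (lay == H5D_CHUNKED_F) then
  call h%chunks('/data', csize)
  print '(a,2i8)', 'chunk size:', csize
end if
```
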
## create dataset softlink

HDF5 can create dataset softlinks within an HDF5 file:

```fortran
call h%softlink(tgt, link)

character(*), intent(in) :: tgt, & !< target path of the dataset to link to
  link !< soft link path to create
```

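For instance (both paths are hypothetical):

```fortran
!! sketch only: make "/alias" a soft link to the existing dataset "/data"
call h%softlink('/data', '/alias')
```
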
## file write operations

Write data from memory to a disk HDF5 dataset.
When the file has been opened for MPI collective I/O via `%open(..., mpi=.true.)`, the write is performed collectively by the MPI workers.
If the overall dataset dimensions "dset_dims" are present, data is collectively gathered from the workers as per the HDF5-MPI docs.
Otherwise, h5fortran-mpi assumes that root has all the data to be written and ignores the workers.

```fortran
call h%write(dname, value, dset_dims, istart, iend, chunk_size, compact)
!! write 0d..7d dataset
character(*), intent(in) :: dname
class(*), intent(in) :: value(..) !< array to write
integer, intent(in), dimension(rank(value)), optional :: dset_dims !< overall dataset dimensions across all workers
integer, intent(in), optional, dimension(rank(value)) :: istart, iend !< array slicing for hyperslab
integer, intent(in), optional :: chunk_size(rank(value)) !< override auto-chunking
logical, intent(in), optional :: compact !< faster I/O for sub-64 kB datasets
```

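A sketch of a collective hyperslab write; the dataset name, global shape, and per-worker row range are made up for illustration:

```fortran
!! sketch only: each MPI worker writes its own rows of a 2-D dataset
integer, parameter :: Nx = 32, Ny = 64   !< overall (global) dataset shape
integer :: i0, i1                        !< this worker's first and last global row
real, allocatable :: A(:,:)              !< local slab owned by this worker

!! ... compute i0, i1 from the MPI rank, then allocate and fill A(i1-i0+1, Ny) ...

call h%write('/data', A, dset_dims=[Nx, Ny], istart=[i0, 1], iend=[i1, Ny])
```
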
Write dataset attribute (e.g. units or instrument):

```fortran
call h%writeattr(dname, attr, attrval)
character(*), intent(in) :: dname, attr !< dataset name, attribute name
class(*), intent(in) :: attrval(:) !< character, real, integer
```

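For example (the dataset and attribute names are hypothetical):

```fortran
!! sketch only: attach a 3-element "resolution" attribute to dataset "/data"
call h%writeattr('/data', 'resolution', [0.1, 0.1, 0.5])
```
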
184+
185+
## file read operations
186+
187+
Read data from disk to memory:
188+
When file has been opened for MPI collective read via: `%open(..., mpi=.true.)` the data is distributed
189+
via MPI to the workers.
190+
For example, if no slicing is specified, the whole dataset is read by root and broadcast to the workers.
191+
If slicing is specified, the data is read and distributed among the workers as per HDF5-MPI docs.
192+
193+
```fortran
194+
call h%read(dname, value, istart, iend)
195+
character(*), intent(in) :: dname
196+
class(*), intent(inout) :: value(..) !< read array to this ALLOCATED variable of rank 0d..7d
197+
integer, intent(in), optional, dimension(rank(value)) :: istart, iend !< array slicing
198+
```
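A sketch of a sliced read, mirroring the write example above (names and ranges are again hypothetical):

```fortran
!! sketch only: each MPI worker reads back its own rows of the 2-D dataset "/data"
integer, parameter :: Ny = 64
integer :: i0, i1             !< this worker's first and last global row
real, allocatable :: A(:,:)   !< already allocated to shape (i1-i0+1, Ny)

call h%read('/data', A, istart=[i0, 1], iend=[i1, Ny])
```
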
Read dataset attribute into memory:

```fortran
call h%readattr(dname, attr, attrval)
character(*), intent(in) :: dname, attr !< dataset name, attribute name
class(*), intent(inout) :: attrval(:) !< character scalar; real vector, integer vector
```

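For example, reading back the hypothetical attribute from the write example above:

```fortran
!! sketch only: read the 3-element "resolution" attribute of dataset "/data"
real :: res(3)

call h%readattr('/data', 'resolution', res)
```
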

CITATION.cff

Lines changed: 1 addition & 1 deletion
@@ -5,5 +5,5 @@ authors:
 given-names: Michael
 orcid: https://orcid.org/0000-0002-1637-6526
 title: h5fortran-mpi
-doi:
+doi: 10.5281/zenodo.5847354
 date-released: 2022-01-14

README.md

Lines changed: 2 additions & 0 deletions
@@ -1,5 +1,7 @@
 # h5fortran-mpi

+[![DOI](https://zenodo.org/badge/377901005.svg)](https://zenodo.org/badge/latestdoi/377901005)
+
 [![ci](https://github.com/geospace-code/h5fortran-mpi/actions/workflows/ci.yml/badge.svg)](https://github.com/geospace-code/h5fortran-mpi/actions/workflows/ci.yml)
 [![ci_macos](https://github.com/geospace-code/h5fortran-mpi/actions/workflows/ci_macos.yml/badge.svg)](https://github.com/geospace-code/h5fortran-mpi/actions/workflows/ci_macos.yml)
 [![intel-oneapi](https://github.com/geospace-code/h5fortran-mpi/actions/workflows/intel-oneapi.yml/badge.svg)](https://github.com/geospace-code/h5fortran-mpi/actions/workflows/intel-oneapi.yml)

codemeta.json

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@
 "issueTracker": "https://github.com/geospace-code/h5fortran-mpi/issues",
 "name": "h5fortran-mpi",
 "version": "1.0.0",
-"identifier": "",
+"identifier": "10.5281/zenodo.5847354",
 "description": "Lightweight object-oriented HDF5-MPI parallel Fortran interface",
 "applicationCategory": "file I/O",
 "developmentStatus": "active",

scripts/CMakeLists.txt

Lines changed: 79 additions & 0 deletions
@@ -0,0 +1,79 @@
cmake_minimum_required(VERSION 3.20...3.22)
project(HDF5_build
LANGUAGES C Fortran
)

option(hdf5_parallel "build HDF5 parallel MPI" on)

if(NOT HDF5_VERSION)
  set(HDF5_VERSION 1.12.1) # default version to build
endif()

# --- system checks
if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
  message(FATAL_ERROR "please specify where to install HDF5 under, like
  cmake -B build -DCMAKE_INSTALL_PREFIX=~/mylibs")
endif()

list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/../cmake/Modules/)


if(NOT MPI_ROOT AND DEFINED ENV{MPI_ROOT})
  set(MPI_ROOT $ENV{MPI_ROOT})
endif()

if(CMAKE_SYSTEM_NAME STREQUAL Linux AND MPI_ROOT)
  set(ld_path $ENV{LD_LIBRARY_PATH})
  cmake_path(CONVERT "${ld_path}" TO_CMAKE_PATH_LIST ld_path NORMALIZE)
  cmake_path(CONVERT "${MPI_ROOT}" TO_CMAKE_PATH_LIST MPI_ROOT NORMALIZE)

  if(NOT "${ld_path}" MATCHES "${MPI_ROOT}/lib")
    message(WARNING "${MPI_ROOT}/lib not found in LD_LIBRARY_PATH: $ENV{LD_LIBRARY_PATH}
    HDF5 build may fail due to bugs in HDF5 package CMake scripts.
    Fix this by adding to ~/.bashrc or similar:
    export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${MPI_ROOT}/lib")
  endif()
endif()

# HDF5 install fails to work (link) if prior HDF5 library is installed there
find_library(_hdf5_libprior NAMES hdf5 PATHS ${CMAKE_INSTALL_PREFIX} PATH_SUFFIXES lib NO_DEFAULT_PATH NO_CACHE)
find_path(_hdf5_incprior NAMES hdf5.h PATHS ${CMAKE_INSTALL_PREFIX} PATH_SUFFIXES include NO_DEFAULT_PATH NO_CACHE)
find_program(_hdf5_binprior NAMES h5cc PATHS ${CMAKE_INSTALL_PREFIX} PATH_SUFFIXES bin NO_DEFAULT_PATH NO_CACHE)
if(_hdf5_binprior)
  cmake_path(GET _hdf5_binprior PARENT_PATH _hdf5_binprior)
else()
  set(_hdf5_binprior "")
endif()
if(_hdf5_libprior)
  cmake_path(GET _hdf5_libprior PARENT_PATH _hdf5_libprior)
endif()
if(_hdf5_libprior OR _hdf5_incprior OR _hdf5_binprior)
  message(FATAL_ERROR "HDF5 library already installed:
  ${_hdf5_libprior}
  ${_hdf5_incprior}
  ${_hdf5_binprior}
  Please pick a new install location or completely remove the old HDF5 install directory.
  Otherwise, HDF5 will fail to link correctly with prior version and this version mixed.")
endif()

# --- commence HDF5 build/install
include(${PROJECT_SOURCE_DIR}/../cmake/libraries.cmake)

set_directory_properties(PROPERTIES EP_UPDATE_DISCONNECTED true)

message(STATUS "Build / install HDF5 ${HDF5_VERSION} to ${CMAKE_INSTALL_PREFIX}")

if(hdf5_parallel)
  find_package(MPI COMPONENTS C REQUIRED)
  include(${PROJECT_SOURCE_DIR}/../cmake/check_mpi.cmake)
  check_mpi_version()
endif()

include(${PROJECT_SOURCE_DIR}/../cmake/hdf5.cmake)

# --- features
include(FeatureSummary)

add_feature_info(HDF5parallel hdf5_parallel "HDF5 MPI layer")

feature_summary(WHAT ENABLED_FEATURES DISABLED_FEATURES)
