@@ -13,9 +13,9 @@ module h5mpi
13
13
H5P_DEFAULT_F, H5P_FILE_ACCESS_F, H5P_DATASET_CREATE_F, H5P_DATASET_XFER_F, &
14
14
H5S_ALL_F, H5S_SELECT_SET_F, &
15
15
H5D_CHUNKED_F, H5D_CONTIGUOUS_F, H5D_COMPACT_F, &
16
- h5dcreate_f, h5dopen_f, h5dclose_f, h5dget_space_f, &
16
+ h5dcreate_f, h5dopen_f, h5dclose_f, h5dget_space_f, h5dget_create_plist_f, &
17
17
h5fopen_f, h5fclose_f, h5fcreate_f, h5fget_filesize_f, h5fflush_f, &
18
- h5pcreate_f, h5pclose_f, h5pset_chunk_f, h5pset_dxpl_mpio_f, h5pset_fapl_mpio_f, &
18
+ h5pcreate_f, h5pclose_f, h5pset_chunk_f, h5pset_dxpl_mpio_f, h5pset_fapl_mpio_f, h5pall_filters_avail_f, &
19
19
h5sselect_hyperslab_f, h5screate_simple_f, h5sclose_f, &
20
20
h5get_libversion_f, &
21
21
h5open_f, h5close_f
@@ -456,18 +456,36 @@ subroutine mpi_hyperslab(mem_dims, dset_dims, dset_id, filespace, memspace, dnam
456
456
integer (HSIZE_T), dimension (size (istart)), intent (in ) :: iend
457
457
458
458
integer (HSIZE_T), dimension (size (mem_dims)) :: c_mem_dims, i0
459
+ integer (HID_T) :: dcpl
459
460
integer :: ierr
460
461
462
+ logical :: filters_OK
463
+
464
+ ! > check that all necessary filters to access dataset are available on the system.
465
+ call h5dget_create_plist_f(dset_id, dcpl, ierr)
466
+ if (ierr/= 0 ) error stop " h5fortran:mpi_hyperslab:h5dget_create_plist: " // dname
467
+
468
+ call h5pall_filters_avail_f(dcpl, filters_OK, ierr)
469
+ if (ierr/= 0 ) error stop " h5fortran:mpi_hyperslab:h5pall_filters_avail: " // dname
470
+ if (.not. filters_OK) then
471
+ error stop " h5fortran: filter(s) missing necessary for dataset " // dname // " in parallel with MPI. This is " // &
472
+ " typically caused by missing DEFLATE compression with HDF5-MPI."
473
+ endif
474
+
475
+ call h5pclose_f(dcpl, ierr)
476
+ if (ierr/= 0 ) error stop " h5fortran:mpi_hyperslab:h5pclose: " // dname
461
477
462
478
if (filespace == H5S_ALL_F) then
463
479
! > create dataspace
464
480
call h5screate_simple_f(rank= size (dset_dims), dims= dset_dims, space_id= filespace, hdferr= ierr)
465
- if (ierr/= 0 ) error stop " h5screate_simple:filespace " // dname
481
+ if (ierr/= 0 ) error stop " h5fortran:mpi_hyperslab: h5screate_simple:filespace " // dname
466
482
endif
467
483
468
484
! > Select hyperslab in the file.
469
485
call h5dget_space_f(dset_id, filespace, ierr)
470
- if (ierr/= 0 ) error stop " h5dget_space: " // dname
486
+ if (ierr/= 0 ) error stop " h5fortran:mpi_hyperslab:h5dget_space: " // dname
487
+
488
+
471
489
472
490
! blk(1) = 1
473
491
! blk(2:) = dset_dims(2:)
@@ -493,11 +511,11 @@ subroutine mpi_hyperslab(mem_dims, dset_dims, dset_id, filespace, memspace, dnam
493
511
! stride=1, & !< for now we don't stride data
494
512
! block=blk !< would this help performance?
495
513
496
- if (ierr/= 0 ) error stop " h5sselect_hyperslab: " // dname
514
+ if (ierr/= 0 ) error stop " h5fortran:mpi_hyperslab: h5sselect_hyperslab: " // dname
497
515
498
516
! > create memory dataspace
499
517
call h5screate_simple_f(rank= size (c_mem_dims), dims= c_mem_dims, space_id= memspace, hdferr= ierr)
500
- if (ierr/= 0 ) error stop " h5fortran:h5screate_simple:memspace " // dname
518
+ if (ierr/= 0 ) error stop " h5fortran:mpi_hyperslab: h5screate_simple:memspace " // dname
501
519
502
520
end subroutine mpi_hyperslab
503
521
0 commit comments