@@ -566,46 +566,65 @@ logical function is_hdf5(filename)
end function is_hdf5


-subroutine mpi_hyperslab(mem_dims, dset_dims, dset_id, filespace, memspace, dname, istart, iend)
+function id2name(id)
+!! get name of object with given id
+
+integer(HID_T) :: id
+character(:), allocatable :: id2name
+
+integer(SIZE_T) :: L
+integer :: ierr
+
+character(2048) :: name
+
+call h5iget_name_f(id, name, len(name, SIZE_T), L, ierr)
+if (ierr /= 0) error stop "h5fortran:id2name:h5iget_name"
+
+id2name = name(:L)
+
+end function id2name
+
+
+subroutine mpi_hyperslab(mem_dims, dset_dims, dset_id, filespace, memspace, istart, iend)
!! Each process defines dataset in memory and writes it to the hyperslab in the file.

integer(HSIZE_T), dimension(:), intent(in) :: mem_dims, dset_dims
integer(HID_T), intent(in) :: dset_id
integer(HID_T), intent(inout) :: filespace, memspace
-character(*), intent(in) :: dname !< for error messages
integer(HSIZE_T), dimension(:), intent(in) :: istart
integer(HSIZE_T), dimension(size(istart)), intent(in) :: iend

integer(HSIZE_T), dimension(size(mem_dims)) :: c_mem_dims, i0
integer(HID_T) :: dcpl
integer :: ierr
-
logical :: filters_OK
+character(:), allocatable :: dset_name
+
+dset_name = id2name(dset_id)

!> check that all necessary filters to access dataset are available on the system.
call h5dget_create_plist_f(dset_id, dcpl, ierr)
-if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5dget_create_plist: " // dname
+if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5dget_create_plist: " // dset_name

call h5pall_filters_avail_f(dcpl, filters_OK, ierr)
-if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5pall_filters_avail: " // dname
+if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5pall_filters_avail: " // dset_name
if (.not. filters_OK) then
-error stop "h5fortran: filter(s) missing necessary for dataset " // dname // " in parallel with MPI. This is " // &
+error stop "h5fortran: filter(s) missing necessary for dataset " // dset_name // " in parallel with MPI. This is " // &
"typically caused by missing DEFLATE compression with HDF5-MPI."
endif

call h5pclose_f(dcpl, ierr)
-if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5pclose: " // dname
+if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5pclose: " // dset_name

if (filespace == H5S_ALL_F) then
!> create dataspace
call h5screate_simple_f(rank=size(dset_dims), dims=dset_dims, space_id=filespace, hdferr=ierr)
-if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5screate_simple:filespace " // dname
+if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5screate_simple:filespace " // dset_name
endif

!> Select hyperslab in the file.
call h5dget_space_f(dset_id, filespace, ierr)
-if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5dget_space: " // dname
-
+if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5dget_space: " // dset_name


! blk(1) = 1
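
For context on the new helper above: id2name asks the HDF5 library, via h5iget_name_f, for the full path of the object behind an identifier, so mpi_hyperslab no longer needs a separate dname argument carried along only for error messages. The standalone sketch below is not part of this commit; it merely illustrates the same h5iget_name_f pattern, and the file name "example.h5" and dataset path "/A" are assumptions that must already exist for it to run.

program demo_id2name
!! minimal sketch of the h5iget_name_f pattern used by id2name above
use hdf5
implicit none

integer(HID_T) :: file_id, dset_id
integer(SIZE_T) :: L
integer :: ierr
character(2048) :: name

call h5open_f(ierr)
call h5fopen_f("example.h5", H5F_ACC_RDONLY_F, file_id, ierr)
call h5dopen_f(file_id, "/A", dset_id, ierr)

!> recover the object path from its identifier, as id2name does
call h5iget_name_f(dset_id, name, len(name, SIZE_T), L, ierr)
if (ierr /= 0) error stop "demo_id2name:h5iget_name_f failed"
print '(a)', "object path: " // name(:L)

call h5dclose_f(dset_id, ierr)
call h5fclose_f(file_id, ierr)
call h5close_f(ierr)

end program demo_id2name
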
@@ -621,9 +640,9 @@ subroutine mpi_hyperslab(mem_dims, dset_dims, dset_id, filespace, memspace, dnam
error stop "ERROR:h5fortran:mpi_hyperslab"
endif

-! print *, 'TRACE:mpi_hyperslab: ' // dname //': istart', i0, 'C mem_dims: ', c_mem_dims, 'mem_dims', mem_dims
+! print *, 'TRACE:mpi_hyperslab: ' // dset_name //': istart', i0, 'C mem_dims: ', c_mem_dims, 'mem_dims', mem_dims

-if (any(c_mem_dims < 1)) error stop "h5mpi:hyperslab:non-positive hyperslab: " // dname
+if (any(c_mem_dims < 1)) error stop "h5mpi:hyperslab:non-positive hyperslab: " // dset_name

call h5sselect_hyperslab_f(filespace, H5S_SELECT_SET_F, &
start=i0, &
@@ -632,11 +651,11 @@ subroutine mpi_hyperslab(mem_dims, dset_dims, dset_id, filespace, memspace, dnam
! stride=1, & !< for now we don't stride data
! block=blk !< would this help performance?

-if(ierr/=0) error stop "g5fortran:mpi_hyperslab:h5sselect_hyperslab: " // dname
+if(ierr/=0) error stop "g5fortran:mpi_hyperslab:h5sselect_hyperslab: " // dset_name

!> create memory dataspace
call h5screate_simple_f(rank=size(c_mem_dims), dims=c_mem_dims, space_id=memspace, hdferr=ierr)
-if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5screate_simple:memspace " // dname
+if(ierr/=0) error stop "h5fortran:mpi_hyperslab:h5screate_simple:memspace " // dset_name

end subroutine mpi_hyperslab
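
Net effect of these hunks on the internal interface: callers of mpi_hyperslab now pass one argument fewer, since the dataset name used in error messages is derived from dset_id itself. A hypothetical call site, using the dummy-argument names above for illustration (actual call sites are not shown in this diff), would change like this:

! before: call mpi_hyperslab(mem_dims, dset_dims, dset_id, filespace, memspace, dname, istart, iend)
! after:
call mpi_hyperslab(mem_dims, dset_dims, dset_id, filespace, memspace, istart, iend)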