Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 26 additions & 1 deletion .github/workflows/cmake-build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -363,4 +363,29 @@ jobs:
- name: Stop the docker
run: |
docker exec --user root build /bin/bash -c "chown -R 1001 /home/p00user/src "
docker container stop build
docker container stop build

pixi_tests:
runs-on: ubuntu-latest
container:
image: debian:trixie
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2

- name: install pixi
shell: bash
run: |
apt-get update
apt-get -qq -y dist-upgrade
apt-get -qq update && apt-get install -qq -y flake8 python3 curl bash
curl -fsSL https://pixi.sh/install.sh | sh
export PATH=/github/home/.pixi/bin:$PATH
pixi shell-hook > .sh.sh
source .sh.sh
pixi add rattler-build compilers
- name: build h5cpp
shell: bash
run: |
source .sh.sh
pixi run rattler-build build --recipe .github/workflows/pixi/recipe.yaml
67 changes: 67 additions & 0 deletions .github/workflows/pixi/recipe.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
context:
version: 0.7.1

package:
name: h5cpp
version: ${{ version }}

source:
- path: ../../../

build:
number: 2
script:
- cmake -B build ${CMAKE_ARGS} -DH5CPP_CONAN=DISABLE -DCMAKE_INSTALL_PREFIX=${PREFIX}
- cmake --build build
- cmake --install build
- ctest --test-dir build --extra-verbose --no-tests=error

requirements:
build:
- hdf5
- cmake
- ninja
- zlib
- make
host:
- hdf5
- catch2
- zlib
run_exports:
- ${{ pin_subpackage("h5cpp", upper_bound="x.x") }}

tests:
- package_contents:
include:
- h5cpp/file/file.hpp
- h5cpp/core/*.hpp
- h5cpp/attribute/*.hpp
- h5cpp/dataspace/*.hpp
- h5cpp/datatype/*.hpp
- h5cpp/error/*.hpp
- h5cpp/file/*.hpp
- h5cpp/filter/*.hpp
- h5cpp/node/*.hpp
- h5cpp/property/*.hpp
- h5cpp/utilities/*.hpp
- h5cpp/contrib/nexus/ebool.hpp
- h5cpp/contrib/stl/*.hpp
lib:
- h5cpp

about:
homepage: https://github.com/ess-dmsc/h5cpp
license: LGPL-2.1-only
license_file: LICENSE
summary: C++ wrapper for hdf5
description: |
h5cpp is a C++ wrapper for hdf5
which significantly simplifies development of HDF5 code.
It provides a high level abstraction to the HDF5 low-level types.
documentation: https://ess-dmsc.github.io/h5cpp
repository: https://github.com/ess-dmsc/h5cpp

extra:
recipe-maintainers:
- jkotan
- yuelongyu
4 changes: 4 additions & 0 deletions .github/workflows/pixi/variants.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
zlib:
- 1
# hdf5:
# - 2.0.0
15 changes: 15 additions & 0 deletions pixi.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
[workspace]
authors = ["Jan Kotanski <jankotan@gmail.com>"]
channels = ["conda-forge"]
name = "h5cpp"
platforms = ["linux-64"]
# platforms = ["linux-64", "linux-aarch64", "osx-arm64", "osx-64", "win-64"]
version = "0.7.1"

[tasks]

[dependencies]
rattler-build = ">=0.60.0,<0.61"
ipython = ">=9.11.0,<10"
python = "==3.14"
conda-smithy = ">=3.56.3,<4"
103 changes: 102 additions & 1 deletion src/h5cpp/node/dataset.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -415,6 +415,50 @@ class DLL_EXPORT Dataset : public Node
property::DatasetTransferList::get()) const;


//!
//! \brief read dataset chunk (*since hdf5 1.10.2*)
//!
//! Read a chunk from a dataset to an instance of T with given byte size.
//!
//! \throws std::runtime_error in case of a failure
//! \tparam T destination type
//! \param data reference to the destination instance of T the chunk is read into
//! \param byte_size of data
//! \param offset logical position of the first element of the chunk in the dataset's dataspace
//! \param dtpl reference to a dataset transfer property list
//! \return filter_mask mask of which filters are used with the chunk
//!
template<typename T>
std::uint32_t read_chunk(T &data,
size_t byte_size,
std::vector<hsize_t> offset,
const property::DatasetTransferList &dtpl =
property::DatasetTransferList::get()) const;


//!
//! \brief read dataset chunk
//!
//! Read a chunk from a dataset to an instance of T.
//!
//! \throws std::runtime_error in case of a failure
//! \tparam T destination type
//! \param data reference to the destination instance of T the chunk is read into
//! \param byte_size of data
//! \param mem_type reference to the memory data type
//! \param offset logical position of the first element of the chunk in the dataset's dataspace
//! \param dtpl reference to a dataset transfer property list
//! \return filter_mask mask of which filters are used with the chunk
//!
template<typename T>
std::uint32_t read_chunk(T &data,
size_t byte_size,
const datatype::Datatype &mem_type,
std::vector<hsize_t> & offset,
const property::DatasetTransferList &dtpl =
property::DatasetTransferList::get()) const;


//!
//! \brief read dataset chunk
//!
Expand Down Expand Up @@ -940,6 +984,16 @@ std::uint32_t Dataset::read_chunk(T &data,
return read_chunk(data, mem_type_holder.get(data), offset, dtpl);
}

//!
//! Convenience overload: deduce the in-memory datatype from the data
//! instance and forward to the explicitly-typed read_chunk overload.
//!
template<typename T>
std::uint32_t Dataset::read_chunk(T &data,
                                  size_t byte_size,
                                  std::vector<hsize_t> offset,
                                  const property::DatasetTransferList &dtpl) const
{
  // The holder keeps the deduced datatype alive for the duration of the call.
  hdf5::datatype::DatatypeHolder type_holder;
  return read_chunk(data, byte_size, type_holder.get(data), offset, dtpl);
}

template<typename T>
std::uint32_t Dataset::read_chunk(T &data,
const datatype::Datatype &mem_type,
Expand All @@ -949,7 +1003,18 @@ std::uint32_t Dataset::read_chunk(T &data,
std::uint32_t filter_mask;
if(mem_type.get_class() == datatype::Class::Integer)
{
#if H5_VERSION_GE(1,10,3)
#if H5_VERSION_GE(2,0,0)
if(H5Dread_chunk1(static_cast<hid_t>(*this),
static_cast<hid_t>(dtpl),
offset.data(),
&filter_mask,
dataspace::ptr(data))<0)
{
std::stringstream ss;
ss<<"Failure to read chunk data from dataset ["<<link().path()<<"]!";
error::Singleton::instance().throw_with_stack(ss.str());
}
#elif H5_VERSION_GE(1,10,3)
if(H5Dread_chunk(static_cast<hid_t>(*this),
static_cast<hid_t>(dtpl),
offset.data(),
Expand Down Expand Up @@ -982,6 +1047,42 @@ std::uint32_t Dataset::read_chunk(T &data,
return filter_mask;
}


//!
//! Read a chunk into \p data, checking against the supplied buffer size
//! (size-checked H5Dread_chunk is only available from HDF5 2.0.0 on;
//! older libraries fall back to the unchecked overload).
//!
template<typename T>
std::uint32_t Dataset::read_chunk(T &data,
                                  size_t byte_size,
                                  const datatype::Datatype &mem_type,
                                  std::vector<hsize_t> & offset,
                                  const property::DatasetTransferList &dtpl) const
{
  // Zero-initialize so no path can return an indeterminate mask.
  std::uint32_t filter_mask = 0;
  if(mem_type.get_class() == datatype::Class::Integer)
  {
#if H5_VERSION_GE(2,0,0)
    // HDF5 2.0 takes the destination buffer size by value (size_t),
    // not by pointer.
    if(H5Dread_chunk(static_cast<hid_t>(*this),
                     static_cast<hid_t>(dtpl),
                     offset.data(),
                     &filter_mask,
                     dataspace::ptr(data),
                     byte_size)<0)
    {
      std::stringstream ss;
      ss<<"Failure to read chunk data from dataset ["<<link().path()<<"]!";
      error::Singleton::instance().throw_with_stack(ss.str());
    }
#else
    // No size-checked variant available: delegate to the unchecked overload
    // and propagate its filter mask (previously the result was discarded,
    // returning an uninitialized value).
    static_cast<void>(byte_size);
    filter_mask = read_chunk(data, mem_type, offset, dtpl);
#endif
  }
  else
  {
    std::stringstream ss;
    ss<<"Failure to read non-integer chunk data from dataset ["<<link().path()<<"]!";
    error::Singleton::instance().throw_with_stack(ss.str());
  }
  return filter_mask;
}


#endif

template<typename T>
Expand Down
3 changes: 3 additions & 0 deletions src/h5cpp/property/file_access.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,9 @@ enum class LibVersion : std::underlying_type<H5F_libver_t>::type {
#endif
#if H5_VERSION_GE(1,13,0)
V114 = H5F_LIBVER_V114,
#endif
#if H5_VERSION_GE(2,0,0)
V200 = H5F_LIBVER_V200,
#endif
Earliest = H5F_LIBVER_EARLIEST
};
Expand Down
14 changes: 14 additions & 0 deletions test/node/dataset_direct_chunk_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,20 @@ SCENARIO("testing dataset access via chunks") {
dataset.read(read_value, framespace);
REQUIRE(frame == read_value);
}
AND_THEN("we can read chunk the data back") {
UShorts read_chunk_value(xdim * ydim);
for (long long unsigned int i = 0; i != nframe; i++) {
dataset.read_chunk(read_chunk_value, {i, 0, 0});
REQUIRE(frame == read_chunk_value);
}
AND_THEN("we can read chunk the data back with given buffer byte_size") {
UShorts read_chunk_svalue(xdim * ydim);
for (long long unsigned int i = 0; i != nframe; i++) {
dataset.read_chunk(read_chunk_svalue, xdim * ydim * sizeof(UShorts), {i, 0, 0});
REQUIRE(frame == read_chunk_svalue);
}
}
}
}
}
}
Expand Down
Loading