Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Basic support for HDF5 filters #350

Merged
merged 4 commits into from
Aug 31, 2023
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 29 additions & 1 deletion kerchunk/hdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,12 +189,40 @@ def _transfer_attrs(
f"TypeError transferring attr, skipping:\n {n}@{h5obj.name} = {v} ({type(v)})"
)

def _decode_blosc(properties): # 32001
blosc_compressors = ('blosclz', 'lz4', 'lz4hc', 'snappy', 'zlib', 'zstd')
_1, _2, bytes_per_num, total_bytes, clevel, shuffle, compressor = properties
return dict (id="blosc", blocksize=total_bytes, clevel=clevel, shuffle=shuffle, cname=blosc_compressors[compressor],)

def _decode_zstd(properties): #32015
return dict (id='zstd', level=properties[0],)


# Registry of supported non-standard HDF5 filters: maps the HDF5 filter id
# (as a *string*) to a function that converts the filter's raw properties
# tuple into a numcodecs codec config dict.
decoders = { "32001" : _decode_blosc,
             "32015" : _decode_zstd,
}

def _decode_filters(self, h5obj: Union[h5py.Dataset, h5py.Group]):
if len(h5obj._filters.keys()) > 1:
raise RuntimeError(
f"{h5obj.name} uses multiple filters {list (h5obj._filters.keys())}. This is not supported by kerchunk."
Copy link
Member

@martindurant martindurant Aug 15, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This came up in #342 - actually zarr does support multiple filters, except only the final one is called "compression"

)
for filter_id, properties in h5obj._filters.items():
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am 95% sure that setting a list like

filters=[numcodecs.get_codec(self.decoders[filter_id](properties) for filter_id, properties in h5obj._filters.items()]

will work fine with compression=None (or we can make the last one the compression), if we include gzip/zlib in this function. Of course, would need to check the order assumed by numcodecs matches hdf.

if not str(filter_id) in self.decoders.keys():
raise RuntimeError(
f"{h5obj.name} uses filter id {filter_id} with properties {properties}, not supported by kerchunk., supported filters are {self.decoders.keys()}"
)
else:
return numcodecs.get_codec(self.decoders[filter_id](properties))


def _translator(self, name: str, h5obj: Union[h5py.Dataset, h5py.Group]):
"""Produce Zarr metadata for all groups and datasets in the HDF5 file."""
try: # method must not raise exception
kwargs = {}
if isinstance(h5obj, h5py.Dataset):
lggr.debug(f"HDF5 dataset: {h5obj.name}")
lggr.debug (f"HDF5 compression: {h5obj.compression}")
if h5obj.id.get_create_plist().get_layout() == h5py.h5d.COMPACT:
# Only do if h5obj.nbytes < self.inline??
kwargs["data"] = h5obj[:]
Expand All @@ -214,7 +242,7 @@ def _translator(self, name: str, h5obj: Union[h5py.Dataset, h5py.Group]):
if h5obj.compression == "gzip":
compression = numcodecs.Zlib(level=h5obj.compression_opts)
else:
compression = None
compression = self._decode_filters(h5obj)
filters = []
dt = None
# Get storage info of this HDF5 dataset...
Expand Down
Loading