From be278422d7ab7d8cb6020e97c1910bee3d94cdab Mon Sep 17 00:00:00 2001
From: Thomas VINCENT
Date: Fri, 25 Nov 2022 16:22:44 +0100
Subject: [PATCH] Stop using FILTER_ID

---
 doc/contribute.rst         |  6 ++----
 src/hdf5plugin/__init__.py | 22 ++++++++++++--------
 src/hdf5plugin/_filters.py | 42 ++++++++------------------------------
 test/test.py               | 12 +++++------
 4 files changed, 30 insertions(+), 52 deletions(-)

diff --git a/doc/contribute.rst b/doc/contribute.rst
index 2bfc56f3..7957fd0f 100644
--- a/doc/contribute.rst
+++ b/doc/contribute.rst
@@ -52,18 +52,16 @@ This briefly describes the steps to add a HDF5 compression filter to the zoo.
 * In case of import errors related to HDF5-related undefined symbols,
   add eventual missing functions under ``src/hdf5_dl.c``.
 
-* Add a "CONSTANT" in ``src/hdf5plugin/_filters.py`` named with the ``FILTER_NAME_ID`` which value is the HDF5 filter ID
-  (See `HDF5 registered filters `_).
-
 * Add a compression options helper class named ``FilterName`` in ``hdf5plugins/_filters.py`` which should inherit from
   ``_FilterRefClass``. This is intended to ease the usage of ``h5py.Group.create_dataset`` ``compression_opts`` argument.
   It must have a `filter_name` class attribute with the same name as in the extension defined in ``setup.py`` (without the ``libh5`` prefix) .
   This name is used to find the filter library.
+  It must also have a ``filter_id`` class attribute set to the HDF5 filter ID (see `HDF5 registered filters `_).
 
 * Add ``FilterName`` to ``hdf5plugin._filters.FILTER_CLASSES``.
 
 * Add to ``hdf5plugin/__init__.py`` the import of the filter ID and helper class:
-  ``from ._filters import FILTER_NAME_ID, FilterName  # noqa``
+  ``from ._filters import FilterName  # noqa``
 
 * Add tests:
 
diff --git a/src/hdf5plugin/__init__.py b/src/hdf5plugin/__init__.py
index bb331f1c..36ac14e9 100644
--- a/src/hdf5plugin/__init__.py
+++ b/src/hdf5plugin/__init__.py
@@ -31,19 +31,25 @@
 from ._version import version, version_info  # noqa
 
 from ._filters import FILTERS  # noqa
-from ._filters import BLOSC_ID, Blosc  # noqa
-from ._filters import BSHUF_ID, Bitshuffle  # noqa
-from ._filters import BZIP2_ID, BZip2  # noqa
-from ._filters import LZ4_ID, LZ4  # noqa
-from ._filters import FCIDECOMP_ID, FciDecomp  # noqa
-from ._filters import ZFP_ID, Zfp  # noqa
-from ._filters import ZSTD_ID, Zstd  # noqa
-from ._filters import SZ_ID, SZ  # noqa
+from ._filters import Blosc  # noqa
+from ._filters import Bitshuffle  # noqa
+from ._filters import BZip2  # noqa
+from ._filters import LZ4  # noqa
+from ._filters import FciDecomp  # noqa
+from ._filters import Zfp  # noqa
+from ._filters import Zstd  # noqa
+from ._filters import SZ  # noqa
 from ._utils import get_config, get_filters, PLUGIN_PATH, register  # noqa
 
 
 # Backward compatibility
 PLUGINS_PATH = PLUGIN_PATH
+BLOSC_ID = Blosc.filter_id
+BSHUF_ID = Bitshuffle.filter_id
+LZ4_ID = LZ4.filter_id
+FCIDECOMP_ID = FciDecomp.filter_id
+ZFP_ID = Zfp.filter_id
+ZSTD_ID = Zstd.filter_id
 
 
 def __getattr__(name):
diff --git a/src/hdf5plugin/_filters.py b/src/hdf5plugin/_filters.py
index 2444299f..41eb29c5 100644
--- a/src/hdf5plugin/_filters.py
+++ b/src/hdf5plugin/_filters.py
@@ -34,32 +34,6 @@
 logger = logging.getLogger(__name__)
 
 
-# IDs of provided filters
-BLOSC_ID = 32001
-"""Blosc filter ID"""
-
-BZIP2_ID = 307
-"""Bzip2 filter ID"""
-
-LZ4_ID = 32004
-"""LZ4_ID filter ID"""
-
-BSHUF_ID = 32008
-"""Bitshuffle filter ID"""
-
-ZFP_ID = 32013
-"""ZFP filter ID"""
-
-ZSTD_ID = 32015
-"""Zstandard filter ID"""
-
-SZ_ID = 32017
-"""SZ filter ID"""
-
-FCIDECOMP_ID = 32018
-"""FCIDECOMP filter ID"""
-
-
 try:
     _FilterRefClass = h5py.filters.FilterRefBase
 except AttributeError:
@@ -118,7 +92,7 @@ class Bitshuffle(_FilterRefClass):
         Default: 3.
     """
     filter_name = "bshuf"
-    filter_id = BSHUF_ID
+    filter_id = 32008
 
     __COMPRESSIONS = {
         'none': 0,
@@ -192,7 +166,7 @@ class Blosc(_FilterRefClass):
     """Flag to enable bit-wise shuffle pre-compression filter"""
 
     filter_name = "blosc"
-    filter_id = BLOSC_ID
+    filter_id = 32001
 
     __COMPRESSIONS = {
         'blosclz': 0,
@@ -228,7 +202,7 @@ class BZip2(_FilterRefClass):
     :param int blocksize: Size of the blocks as a multiple of 100k
     """
     filter_name = "bzip2"
-    filter_id = BZIP2_ID
+    filter_id = 307
 
     def __init__(self, blocksize=9) -> None:
         blocksize = int(blocksize)
@@ -251,7 +225,7 @@ class FciDecomp(_FilterRefClass):
         f.close()
     """
     filter_name = "fcidecomp"
-    filter_id = FCIDECOMP_ID
+    filter_id = 32018
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -279,7 +253,7 @@ class LZ4(_FilterRefClass):
         Default: 0 (for 1GB per block).
     """
     filter_name = "lz4"
-    filter_id = LZ4_ID
+    filter_id = 32004
 
     def __init__(self, nbytes=0):
         nbytes = int(nbytes)
@@ -369,7 +343,7 @@ class Zfp(_FilterRefClass):
         It controls the absolute error.
     """
     filter_name = "zfp"
-    filter_id = ZFP_ID
+    filter_id = 32013
 
     def __init__(self,
                  rate=None,
@@ -463,7 +437,7 @@ class SZ(_FilterRefClass):
     """
 
     filter_name = "sz"
-    filter_id = SZ_ID
+    filter_id = 32017
 
     def __init__(self, absolute=None, relative=None, pointwise_relative=None):
         if (absolute, relative, pointwise_relative).count(None) < 2:
@@ -527,7 +501,7 @@ class Zstd(_FilterRefClass):
         f.close()
     """
     filter_name = "zstd"
-    filter_id = ZSTD_ID
+    filter_id = 32015
 
     def __init__(self, clevel=3):
         assert 1 <= clevel <= 22
diff --git a/test/test.py b/test/test.py
index ff236dad..336d294a 100644
--- a/test/test.py
+++ b/test/test.py
@@ -39,7 +39,7 @@
 class TestHDF5PluginRead(unittest.TestCase):
     """Test reading existing files with compressed data"""
 
-    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.BLOSC_ID),
+    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.Blosc.filter_id),
                          "Blosc filter not available")
     def testBlosc(self):
         """Test reading Blosc compressed data"""
@@ -62,7 +62,7 @@ def testBlosc(self):
         target.shape = expected_shape
         self.assertTrue(numpy.allclose(data, target), "Incorrect readout")
 
-    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.LZ4_ID),
+    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.LZ4.filter_id),
                          "LZ4 filter not available")
     def testLZ4(self):
         """Test reading lz4 compressed data"""
@@ -79,7 +79,7 @@ def testLZ4(self):
         self.assertTrue(data.shape[2] == 2070, "Incorrect shape")
         self.assertTrue(data[21, 1911, 1549] == 3141, "Incorrect value")
 
-    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.BSHUF_ID),
+    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.Bitshuffle.filter_id),
                          "Bitshuffle filter not available")
     def testBitshuffle(self):
         """Test reading bitshuffle compressed data"""
@@ -96,7 +96,7 @@ def testBitshuffle(self):
         self.assertTrue(data.shape[2] == 2070, "Incorrect shape")
         self.assertTrue(data[0, 1372, 613] == 922, "Incorrect value")
 
-    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.FCIDECOMP_ID),
+    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.FciDecomp.filter_id),
                          "FCIDECOMP filter not available")
     def testFcidecomp(self):
         """Test reading FCIDECOMP compressed data"""
@@ -115,7 +115,7 @@ def testFcidecomp(self):
         self.assertTrue(numpy.alltrue(data == expected_data),
                         "Incorrect values read")
 
-    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.ZFP_ID),
+    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.Zfp.filter_id),
                          "ZFP filter not available")
     def testZfp(self):
         """Test reading ZFP compressed data"""
@@ -137,7 +137,7 @@ def testZfp(self):
         self.assertTrue(numpy.allclose(original, compressed),
                         "Values should be close")
 
-    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.SZ_ID),
+    @unittest.skipUnless(h5py.h5z.filter_avail(hdf5plugin.SZ.filter_id),
                          "SZ filter not available")
     def testSZ(self):
         """Test reading SZ compressed data"""
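
As an illustration of the updated doc/contribute.rst step above, a minimal sketch of a compression options helper class once the module-level ID constant is gone. The class name ``NewFilt``, the ``"newfilt"`` filter name, the ID value and the ``level`` option are placeholders for a hypothetical filter; only the ``filter_name``/``filter_id`` class attributes and the ``filter_options`` tuple follow the convention of the existing classes in ``src/hdf5plugin/_filters.py``.

    # Hypothetical helper class sketch; when added inside src/hdf5plugin/_filters.py
    # this import is not needed, since _FilterRefClass is defined in that module.
    from hdf5plugin._filters import _FilterRefClass


    class NewFilt(_FilterRefClass):
        """Compression options helper for a hypothetical "newfilt" filter."""
        filter_name = "newfilt"  # extension name in setup.py without the libh5 prefix
        filter_id = 32768        # placeholder: use the ID registered with the HDF Group

        def __init__(self, level=1):
            # Filter options tuple, as set by the existing helper classes.
            self.filter_options = (int(level),)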
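
A minimal usage sketch of the class-attribute lookup that replaces the removed constants, mirroring the test changes above; the file name ``blosc_example.h5`` and the Blosc options are arbitrary example values.

    import h5py
    import numpy
    import hdf5plugin  # importing hdf5plugin registers the filter plugins with HDF5

    # Read the filter ID from the helper class rather than the removed
    # module-level BLOSC_ID constant (kept only for backward compatibility).
    if h5py.h5z.filter_avail(hdf5plugin.Blosc.filter_id):
        with h5py.File("blosc_example.h5", "w") as f:
            # The helper class expands to the compression/compression_opts
            # arguments of h5py.Group.create_dataset.
            f.create_dataset(
                "data",
                data=numpy.arange(100),
                **hdf5plugin.Blosc(cname="lz4", clevel=5),
            )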