diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0cc5da870..9f5814605 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -18,7 +18,7 @@ repos:
 #  hooks:
 #    - id: black
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.7.3
+    rev: v0.8.0
     hooks:
       - id: ruff
 #  - repo: https://github.com/econchick/interrogate
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a8a97ff58..438a0c7df 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,7 +3,7 @@
 ## HDMF 4.0.0 (Upcoming)
 
 ### Deprecations
-- The following classes have been deprecated and removed: Array, AbstractSortedArray, SortedArray, LinSpace, Query, RegionSlicer, ListSlicer, H5RegionSlicer, DataRegion. The following methods have been deprecated and removed: fmt_docval_args, call_docval_func, get_container_cls, add_child, set_dataio (now refactored as set_data_io). We have also removed all early evelopment for region references. @mavaylon1 [#1998](https://github.com/hdmf-dev/hdmf/pull/1198)
+- The following classes have been deprecated and removed: Array, AbstractSortedArray, SortedArray, LinSpace, Query, RegionSlicer, ListSlicer, H5RegionSlicer, DataRegion, RegionBuilder. The following methods have been deprecated and removed: fmt_docval_args, call_docval_func, get_container_cls, add_child, set_dataio (now refactored as set_data_io). We have also removed all early development for region references. @mavaylon1, @rly [#1998](https://github.com/hdmf-dev/hdmf/pull/1198), [#1212](https://github.com/hdmf-dev/hdmf/pull/1212)
 - Python 3.8 has been deprecated. Python 3.9 is the new minimum with support for Python 3.13. @mavaylon1 [#1209](https://github.com/hdmf-dev/hdmf/pull/1209)
 
 ### Enhancements
diff --git a/docs/source/conf.py b/docs/source/conf.py
index c20869e12..d385630d2 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -87,7 +87,6 @@
 
 nitpicky = True
 nitpick_ignore = [('py:class', 'Intracomm'),
-                  ('py:class', 'h5py.RegionReference'),
                   ('py:class', 'h5py._hl.dataset.Dataset'),
                   ('py:class', 'function'),
                   ('py:class', 'unittest.case.TestCase'),
diff --git a/docs/source/overview_software_architecture.rst b/docs/source/overview_software_architecture.rst
index 973a01b2f..d63c953fe 100644
--- a/docs/source/overview_software_architecture.rst
+++ b/docs/source/overview_software_architecture.rst
@@ -68,7 +68,7 @@ Builder
   * :py:class:`~hdmf.build.builders.GroupBuilder` - represents a collection of objects
   * :py:class:`~hdmf.build.builders.DatasetBuilder` - represents data
   * :py:class:`~hdmf.build.builders.LinkBuilder` - represents soft-links
-  * :py:class:`~hdmf.build.builders.RegionBuilder` - represents a slice into data (Subclass of :py:class:`~hdmf.build.builders.DatasetBuilder`)
+  * :py:class:`~hdmf.build.builders.ReferenceBuilder` - represents a reference to another group or dataset
 
 * **Main Module:** :py:class:`hdmf.build.builders`
 
diff --git a/src/hdmf/backends/hdf5/h5tools.py b/src/hdmf/backends/hdf5/h5tools.py
index bf8f86bb9..d30cef06c 100644
--- a/src/hdmf/backends/hdf5/h5tools.py
+++ b/src/hdmf/backends/hdf5/h5tools.py
@@ -908,8 +908,6 @@ def get_type(cls, data):
         "utf-8": H5_TEXT,
         "ascii": H5_BINARY,
         "bytes": H5_BINARY,
-        "ref": H5_REF,
-        "reference": H5_REF,
         "object": H5_REF,
         "isodatetime": H5_TEXT,
         "datetime": H5_TEXT,
@@ -1492,7 +1490,7 @@ def __is_ref(self, dtype):
         if isinstance(dtype, dict):  # may be dict from reading a compound dataset
             return self.__is_ref(dtype['dtype'])
         if isinstance(dtype, str):
-            return dtype == DatasetBuilder.OBJECT_REF_TYPE or dtype == DatasetBuilder.REGION_REF_TYPE
+            return dtype == DatasetBuilder.OBJECT_REF_TYPE
         return False
 
     def __queue_ref(self, func):
@@ -1541,7 +1539,7 @@ def generate_dataset_html(dataset):
 
         array_info_dict = get_basic_array_info(dataset)
         if isinstance(dataset, h5py.Dataset):
-
+            dataset_type = "HDF5 dataset"
             # get info from hdf5 dataset
             compressed_size = dataset.id.get_storage_size()
             if hasattr(dataset, "nbytes"):  # TODO: Remove this after h5py minimal version is larger than 3.0
@@ -1556,10 +1554,13 @@
                 "Compression opts": dataset.compression_opts,
                 "Compression ratio": compression_ratio,
             }
-
             array_info_dict.update(hdf5_info_dict)
-        # generate html repr
-        repr_html = generate_array_html_repr(array_info_dict, dataset, "HDF5 dataset")
+        elif isinstance(dataset, np.ndarray):
+            dataset_type = "NumPy array"
+        else:
+            dataset_type = dataset.__class__.__name__
+
+        repr_html = generate_array_html_repr(array_info_dict, dataset, dataset_type)
 
         return repr_html
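Note: in `generate_dataset_html` above, the hard-coded `"HDF5 dataset"` label is replaced by a `dataset_type` string chosen per branch, so non-HDF5 arrays also get a sensible heading. A minimal standalone sketch of that dispatch (the `describe_array` helper name is illustrative only, not part of HDMF):

```python
import h5py
import numpy as np

def describe_array(dataset):
    """Pick a display label for an array container, mirroring the branches added above."""
    if isinstance(dataset, h5py.Dataset):
        dataset_type = "HDF5 dataset"
    elif isinstance(dataset, np.ndarray):
        dataset_type = "NumPy array"
    else:
        # any other array-like object falls back to its class name
        dataset_type = dataset.__class__.__name__
    return dataset_type

print(describe_array(np.arange(5)))  # -> "NumPy array"
```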
diff --git a/src/hdmf/build/__init__.py b/src/hdmf/build/__init__.py
index ea5d21152..87e0ac57e 100644
--- a/src/hdmf/build/__init__.py
+++ b/src/hdmf/build/__init__.py
@@ -1,4 +1,4 @@
-from .builders import Builder, DatasetBuilder, GroupBuilder, LinkBuilder, ReferenceBuilder, RegionBuilder
+from .builders import Builder, DatasetBuilder, GroupBuilder, LinkBuilder, ReferenceBuilder
 from .classgenerator import CustomClassGenerator, MCIClassGenerator
 from .errors import (BuildError, OrphanContainerBuildError, ReferenceTargetNotBuiltError, ContainerConfigurationError,
                      ConstructError)
diff --git a/src/hdmf/build/builders.py b/src/hdmf/build/builders.py
index cb658b6d4..2d90c24e3 100644
--- a/src/hdmf/build/builders.py
+++ b/src/hdmf/build/builders.py
@@ -6,7 +6,6 @@
 from datetime import datetime, date
 
 import numpy as np
-from h5py import RegionReference
 
 from ..utils import docval, getargs, get_docval
 
@@ -320,11 +319,10 @@ def values(self):
 class DatasetBuilder(BaseBuilder):
 
     OBJECT_REF_TYPE = 'object'
-    REGION_REF_TYPE = 'region'
 
     @docval({'name': 'name', 'type': str, 'doc': 'The name of the dataset.'},
             {'name': 'data',
-             'type': ('array_data', 'scalar_data', 'data', 'DatasetBuilder', 'RegionBuilder', Iterable, datetime, date),
+             'type': ('array_data', 'scalar_data', 'data', 'DatasetBuilder', Iterable, datetime, date),
              'doc': 'The data in this dataset.', 'default': None},
             {'name': 'dtype', 'type': (type, np.dtype, str, list),
              'doc': 'The datatype of this dataset.', 'default': None},
@@ -429,20 +427,3 @@ def __init__(self, **kwargs):
     def builder(self):
         """The target builder object."""
         return self['builder']
-
-
-class RegionBuilder(ReferenceBuilder):
-
-    @docval({'name': 'region', 'type': (slice, tuple, list, RegionReference),
-             'doc': 'The region, i.e. slice or indices, into the target dataset.'},
-            {'name': 'builder', 'type': DatasetBuilder, 'doc': 'The dataset this region reference applies to.'})
-    def __init__(self, **kwargs):
-        """Create a builder object for a region reference."""
-        region, builder = getargs('region', 'builder', kwargs)
-        super().__init__(builder)
-        self['region'] = region
-
-    @property
-    def region(self):
-        """The selected region of the target dataset."""
-        return self['region']
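Note: with `RegionBuilder` removed, `ReferenceBuilder` is the remaining builder-level way to express a reference, and only whole-object references are supported. A rough usage sketch, assuming hdmf with these changes installed (the builder names and example data below are made up for illustration):

```python
from hdmf.build import DatasetBuilder, ReferenceBuilder

# A dataset builder we want to point at (hypothetical example data).
timestamps = DatasetBuilder(name="timestamps", data=[0.0, 0.1, 0.2], dtype="float")

# An object reference to the whole dataset; slicing into the target
# (the old RegionBuilder behavior) is no longer available.
ref = ReferenceBuilder(timestamps)

# The reference can be stored as dataset data with the 'object' reference dtype.
source = DatasetBuilder(name="source", data=ref, dtype=DatasetBuilder.OBJECT_REF_TYPE)
```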
diff --git a/src/hdmf/build/objectmapper.py b/src/hdmf/build/objectmapper.py
index c2ef44b5f..176de322c 100644
--- a/src/hdmf/build/objectmapper.py
+++ b/src/hdmf/build/objectmapper.py
@@ -6,7 +6,7 @@
 
 import numpy as np
 
-from .builders import DatasetBuilder, GroupBuilder, LinkBuilder, Builder, ReferenceBuilder, RegionBuilder, BaseBuilder
+from .builders import DatasetBuilder, GroupBuilder, LinkBuilder, Builder, ReferenceBuilder, BaseBuilder
 from .errors import (BuildError, OrphanContainerBuildError, ReferenceTargetNotBuiltError, ContainerConfigurationError,
                      ConstructError)
 from .manager import Proxy, BuildManager
@@ -1214,8 +1214,6 @@ def __get_subspec_values(self, builder, spec, manager):
                 continue
             if isinstance(attr_val, (GroupBuilder, DatasetBuilder)):
                 ret[attr_spec] = manager.construct(attr_val)
-            elif isinstance(attr_val, RegionBuilder):  # pragma: no cover
-                raise ValueError("RegionReferences as attributes is not yet supported")
             elif isinstance(attr_val, ReferenceBuilder):
                 ret[attr_spec] = manager.construct(attr_val.builder)
             else:
diff --git a/src/hdmf/container.py b/src/hdmf/container.py
index 864b34ee9..ce4e8b821 100644
--- a/src/hdmf/container.py
+++ b/src/hdmf/container.py
@@ -707,8 +707,11 @@ def _generate_field_html(self, key, value, level, access_code):
         return f'