Enable ruff/bugbear rules (B) and fix issues #1702

Merged · 8 commits · May 7, 2024
3 changes: 3 additions & 0 deletions docs/release.rst
@@ -34,6 +34,9 @@ Docs

 Maintenance
 ~~~~~~~~~~~
+* Enable ruff/bugbear rules (B) and fix issues.
+  By :user:`Dimitri Papadopoulos Orfanos <DimitriPapadopoulos>` :issue:`1702`.
+
 * Minor updates to use `np.inf` instead of `np.PINF` / `np.NINF` in preparation for NumPy 2.0.0 release.
   By :user:`Joe Hamman <jhamman>` :issue:`1842`.
5 changes: 5 additions & 0 deletions pyproject.toml
@@ -103,6 +103,11 @@ exclude = [
     "docs"
 ]

+[tool.ruff.lint]
+extend-select = [
+    "B"
+]
+
 [tool.black]
 line-length = 100
 exclude = '''
8 changes: 4 additions & 4 deletions zarr/_storage/absstore.py
@@ -156,8 +156,8 @@ def __getitem__(self, key):
         blob_name = self._append_path_to_prefix(key)
         try:
             return self.client.download_blob(blob_name).readall()
-        except ResourceNotFoundError:
-            raise KeyError(f"Blob {blob_name} not found")
+        except ResourceNotFoundError as e:
+            raise KeyError(f"Blob {blob_name} not found") from e

     def __setitem__(self, key, value):
         value = ensure_bytes(value)
@@ -169,8 +169,8 @@ def __delitem__(self, key):

         try:
             self.client.delete_blob(self._append_path_to_prefix(key))
-        except ResourceNotFoundError:
-            raise KeyError(f"Blob {key} not found")
+        except ResourceNotFoundError as e:
+            raise KeyError(f"Blob {key} not found") from e

     def __eq__(self, other):
         return (
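Most of the changes in this PR follow the pattern above, which matches bugbear rule B904: inside an `except` block, re-raise with `raise ... from err` (or `from None`) so the original error is recorded as the direct cause. A minimal standalone sketch of the difference, with made-up names (not zarr code):

import traceback

def lookup(mapping, key):
    try:
        return mapping[key]
    except KeyError as e:
        # Without `from e`, the traceback says "During handling of the above
        # exception, another exception occurred", which reads like a bug in
        # the handler; `from e` makes the KeyError the explicit direct cause.
        raise ValueError(f"unknown key: {key!r}") from e

try:
    lookup({}, "foo")
except ValueError:
    traceback.print_exc()  # prints "The above exception was the direct cause..."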
8 changes: 4 additions & 4 deletions zarr/_storage/v3_storage_transformers.py
@@ -183,8 +183,8 @@ def __getitem__(self, key):
         shard_key, chunk_subkey = self._key_to_shard(key)
         try:
             full_shard_value = self.inner_store[shard_key]
-        except KeyError:
-            raise KeyError(key)
+        except KeyError as e:
+            raise KeyError(key) from e
         index = self._get_index_from_buffer(full_shard_value)
         chunk_slice = index.get_chunk_slice(chunk_subkey)
         if chunk_slice is not None:
@@ -265,8 +265,8 @@ def __delitem__(self, key):
         shard_key, chunk_subkey = self._key_to_shard(key)
         try:
             index = self._get_index_from_store(shard_key)
-        except KeyError:
-            raise KeyError(key)
+        except KeyError as e:
+            raise KeyError(key) from e

         index.set_chunk_slice(chunk_subkey, None)
4 changes: 2 additions & 2 deletions zarr/core.py
@@ -200,8 +200,8 @@ def _load_metadata_nosync(self):
         try:
             mkey = _prefix_to_array_key(self._store, self._key_prefix)
             meta_bytes = self._store[mkey]
-        except KeyError:
-            raise ArrayNotFoundError(self._path)
+        except KeyError as e:
+            raise ArrayNotFoundError(self._path) from e
         else:
             # decode and store metadata as instance members
             meta = self._store._metadata_class.decode_array_metadata(meta_bytes)
2 changes: 1 addition & 1 deletion zarr/creation.py
@@ -297,7 +297,7 @@ def _kwargs_compat(compressor, fill_value, kwargs):

     # ignore other keyword arguments
     for k in kwargs:
-        warn(f"ignoring keyword argument {k!r}")
+        warn(f"ignoring keyword argument {k!r}", stacklevel=2)

     return compressor, fill_value
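The `stacklevel` additions here and in the files below appear to correspond to bugbear rule B028, which asks for an explicit `stacklevel` on `warnings.warn`. A small sketch of the effect, with hypothetical names:

import warnings

def legacy_option():
    # With stacklevel=2 the warning is attributed to the caller of
    # legacy_option() rather than to this warn() call, so users see
    # which of *their* lines triggered it.
    warnings.warn("legacy_option() is ignored", stacklevel=2)

def user_code():
    legacy_option()  # the reported location is this line

user_code()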
10 changes: 5 additions & 5 deletions zarr/hierarchy.py
@@ -187,16 +187,16 @@ def __init__(
             mkey = _prefix_to_group_key(self._store, self._key_prefix)
             assert not mkey.endswith("root/.group")
             meta_bytes = store[mkey]
-        except KeyError:
+        except KeyError as e:
             if self._version == 2:
-                raise GroupNotFoundError(path)
+                raise GroupNotFoundError(path) from e
             else:
                 implicit_prefix = meta_root + self._key_prefix
                 if self._store.list_prefix(implicit_prefix):
                     # implicit group does not have any metadata
                     self._meta = None
                 else:
-                    raise GroupNotFoundError(path)
+                    raise GroupNotFoundError(path) from e
         else:
             self._meta = self._store._metadata_class.decode_group_metadata(meta_bytes)
@@ -536,8 +536,8 @@ def __getattr__(self, item):
         # allow access to group members via dot notation
         try:
             return self.__getitem__(item)
-        except KeyError:
-            raise AttributeError
+        except KeyError as e:
+            raise AttributeError from e

     def __dir__(self):
         # noinspection PyUnresolvedReferences
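A side note on the `__getattr__` hunk: translating KeyError into AttributeError is load-bearing, because Python's attribute machinery (`hasattr`, three-argument `getattr`) only treats AttributeError as "attribute missing" — a leaked KeyError would propagate instead. A minimal sketch with hypothetical names, mimicking the dot-notation pattern above:

class Members:
    def __init__(self, items):
        self._items = items

    def __getattr__(self, name):
        try:
            return self._items[name]
        except KeyError as e:
            # Must be AttributeError: hasattr()/getattr() only swallow
            # AttributeError, so any other exception type would escape.
            raise AttributeError(name) from e

m = Members({"sub": 1})
assert m.sub == 1
assert not hasattr(m, "missing")  # works only because AttributeError is raised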
2 changes: 1 addition & 1 deletion zarr/indexing.py
@@ -932,7 +932,7 @@ def check_fields(fields, dtype):
             # multiple field selection
             out_dtype = np.dtype([(f, dtype[f]) for f in fields])
         except KeyError as e:
-            raise IndexError(f"invalid 'fields' argument, field not found: {e!r}")
+            raise IndexError(f"invalid 'fields' argument, field not found: {e!r}") from e
         else:
             return out_dtype
     else:
6 changes: 3 additions & 3 deletions zarr/meta.py
@@ -310,8 +310,8 @@ def decode_dtype(cls, d, validate=True):
             # extract the type from the extension info
             try:
                 d = d["type"]
-            except KeyError:
-                raise KeyError("Extended dtype info must provide a key named 'type'.")
+            except KeyError as e:
+                raise KeyError("Extended dtype info must provide a key named 'type'.") from e
             d = cls._decode_dtype_descr(d)
             dtype = np.dtype(d)
             if validate:
@@ -518,7 +518,7 @@ def decode_array_metadata(cls, s: Union[MappingType, bytes, str]) -> MappingType
             meta["storage_transformers"] = storage_transformers

         except Exception as e:
-            raise MetadataError(f"error decoding metadata: {e}")
+            raise MetadataError(f"error decoding metadata: {e}") from e
         else:
             return meta
2 changes: 1 addition & 1 deletion zarr/meta_v1.py
@@ -23,7 +23,7 @@ def decode_metadata(b):
             order=meta["order"],
         )
     except Exception as e:
-        raise MetadataError(f"error decoding metadata: {e}")
+        raise MetadataError(f"error decoding metadata: {e}") from e
     else:
         return meta
22 changes: 17 additions & 5 deletions zarr/n5.py
@@ -125,7 +125,11 @@ def __setitem__(self, key: str, value: Any):

         for k in n5_keywords:
             if k in zarr_attrs:
-                warnings.warn(f"Attribute {k} is a reserved N5 keyword", UserWarning)
+                warnings.warn(
+                    f"Attribute {k} is a reserved N5 keyword",
+                    UserWarning,
+                    stacklevel=2,
+                )

         # remove previous user attributes
         for k in list(n5_attrs.keys()):
@@ -327,7 +331,10 @@ class N5FSStore(FSStore):
     def __init__(self, *args, **kwargs):
         if "dimension_separator" in kwargs:
             kwargs.pop("dimension_separator")
-            warnings.warn("Keyword argument `dimension_separator` will be ignored")
+            warnings.warn(
+                "Keyword argument `dimension_separator` will be ignored",
+                stacklevel=2,
+            )
         dimension_separator = "."
         super().__init__(*args, dimension_separator=dimension_separator, **kwargs)
@@ -411,7 +418,11 @@ def __setitem__(self, key: str, value: Any):

         for k in n5_keywords:
             if k in zarr_attrs.keys():
-                warnings.warn(f"Attribute {k} is a reserved N5 keyword", UserWarning)
+                warnings.warn(
+                    f"Attribute {k} is a reserved N5 keyword",
+                    UserWarning,
+                    stacklevel=2,
+                )

         # replace previous user attributes
         for k in list(n5_attrs.keys()):
@@ -597,8 +608,8 @@ def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dic
         array_metadata["n5"] = N5_FORMAT
     try:
         dtype = np.dtype(array_metadata["dataType"])
-    except TypeError:
-        raise TypeError(f"Data type {array_metadata['dataType']} is not supported by N5")
+    except TypeError as e:
+        raise TypeError(f"Data type {array_metadata['dataType']} is not supported by N5") from e

     array_metadata["dataType"] = dtype.name
     array_metadata["dimensions"] = array_metadata["dimensions"][::-1]
@@ -711,6 +722,7 @@ def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict
             "Not all N5 implementations support lzma compression (yet). You "
             "might not be able to open the dataset with another N5 library.",
             RuntimeWarning,
+            stacklevel=2,
         )
         n5_config["format"] = _compressor_config["format"]
         n5_config["check"] = _compressor_config["check"]
16 changes: 10 additions & 6 deletions zarr/storage.py
@@ -589,11 +589,15 @@ def _init_array_metadata(
                     "missing object_codec for object array; this will raise a "
                     "ValueError in version 3.0",
                     FutureWarning,
+                    stacklevel=2,
                 )
         else:
             filters_config.insert(0, object_codec.get_config())
     elif object_codec is not None:
-        warnings.warn("an object_codec is only needed for object arrays")
+        warnings.warn(
+            "an object_codec is only needed for object arrays",
+            stacklevel=2,
+        )

     # use null to indicate no filters
     if not filters_config:
@@ -869,8 +873,8 @@ def __getitem__(self, item: str):
         parent, key = self._get_parent(item)
         try:
             value = parent[key]
-        except KeyError:
-            raise KeyError(item)
+        except KeyError as e:
+            raise KeyError(item) from e
         else:
             if isinstance(value, self.cls):
                 raise KeyError(item)
@@ -888,8 +892,8 @@ def __delitem__(self, item: str):
         parent, key = self._get_parent(item)
         try:
             del parent[key]
-        except KeyError:
-            raise KeyError(item)
+        except KeyError as e:
+            raise KeyError(item) from e

     def __contains__(self, item: str):  # type: ignore[override]
         try:
@@ -1137,7 +1141,7 @@ def __setitem__(self, key, value):
                 os.makedirs(dir_path)
             except OSError as e:
                 if e.errno != errno.EEXIST:
-                    raise KeyError(key)
+                    raise KeyError(key) from e

         # write to temporary file
         # note we're not using tempfile.NamedTemporaryFile to avoid restrictive file permissions
2 changes: 1 addition & 1 deletion zarr/tests/test_meta.py
@@ -601,7 +601,7 @@ def test_metadata3_exceptions():
     required = ["zarr_format", "metadata_encoding", "metadata_key_suffix", "extensions"]
     for key in required:
         meta = copy.copy(_default_entry_point_metadata_v3)
-        meta.pop("zarr_format")
+        meta.pop(key)
         with pytest.raises(ValueError):
             # cannot encode metadata that is missing a required key
             Metadata3.encode_hierarchy_metadata(meta)

Contributor Author: I hope I haven't missed something here. Otherwise, what would be the point of the key iterator?

Member: Wow 🤯
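The hunk above fixes a real test bug surfaced by the lint run: the loop variable was never used, so every iteration popped the same hard-coded key and the other required keys were never exercised — presumably what bugbear B007 (unused loop control variable) flagged here. A reduced sketch of the bug class, with made-up data:

required = ["a", "b", "c"]
for key in required:
    meta = {"a": 1, "b": 2, "c": 3}
    meta.pop("a")    # bug: every iteration removes "a"; `key` is never used
    # meta.pop(key)  # fix: exercise each required key in turn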
6 changes: 3 additions & 3 deletions zarr/tests/test_storage.py
@@ -1353,13 +1353,13 @@ def test_exceptions(self, memory_store):
         # no exception from FSStore.getitems getting KeyError
         assert group.store.getitems(["foo"], contexts={}) == {}
         # exception from FSStore.getitems getting AttributeError
-        with pytest.raises(Exception):
+        with pytest.raises(Exception):  # noqa: B017
             group.store.getitems(["x/0"], contexts={})
         # exception from FSStore.getitems getting AttributeError
-        with pytest.raises(Exception):
+        with pytest.raises(Exception):  # noqa: B017
             x[...]
         # exception from FSStore.__getitem__ getting AttributeError
-        with pytest.raises(Exception):
+        with pytest.raises(Exception):  # noqa: B017
             y[...]
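The `# noqa: B017` markers acknowledge bugbear B017, which flags a bare `pytest.raises(Exception)` as too broad: such a test passes even when the code fails for an unrelated reason. Here the broad form seems intentional, hence suppression rather than a rewrite. A sketch of the distinction, with hypothetical code:

import pytest

def parse(text):
    return int(text)

def test_parse_rejects_garbage():
    # Too broad: would also pass on a TypeError, NameError, etc.
    with pytest.raises(Exception):  # noqa: B017
        parse("not a number")
    # Preferred: documents the expected failure mode.
    with pytest.raises(ValueError):
        parse("not a number")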
2 changes: 1 addition & 1 deletion zarr/tests/test_storage_v3.py
@@ -286,7 +286,7 @@ def test_rename_nonexisting(self):

     def test_get_partial_values(self):
         store = self.create_store()
-        store.supports_efficient_get_partial_values in [True, False]
+        assert store.supports_efficient_get_partial_values in [True, False]
         store[data_root + "foo"] = b"abcdefg"
         store[data_root + "baz"] = b"z"
         assert [b"a"] == store.get_partial_values([(data_root + "foo", (0, 1))])

Contributor Author: That's one of the most useful bugbear rules. It finds such issues much more often than I would have thought.
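The missing `assert` above is the kind of bug bugbear B015 (pointless comparison) exists for — assuming that is the rule that fired here: the comparison was evaluated and its result discarded, so the check could never fail. A reduced sketch:

def compute():
    return 42

compute() == 41          # result discarded: silently "passes" no matter what
assert compute() == 42   # the intended check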
12 changes: 6 additions & 6 deletions zarr/util.py
@@ -198,11 +198,11 @@ def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype
             args = []
         try:
             object_codec = codec_registry[codec_id](*args)
-        except KeyError:  # pragma: no cover
+        except KeyError as e:  # pragma: no cover
             raise ValueError(
                 f"codec {codec_id!r} for object type {key!r} is not "
                 f"available; please provide an object_codec manually"
-            )
+            ) from e
         return dtype, object_codec

     dtype = np.dtype(dtype)
@@ -332,7 +332,7 @@ def normalize_fill_value(fill_value, dtype: np.dtype):
             raise ValueError(
                 f"fill_value {fill_value!r} is not valid for dtype {dtype}; "
                 f"nested exception: {e}"
-            )
+            ) from e

     return fill_value

@@ -492,13 +492,13 @@ def tree_widget_sublist(node, root=False, expand=False):
 def tree_widget(group, expand, level):
     try:
         import ipytree
-    except ImportError as error:
+    except ImportError as e:
         raise ImportError(
-            f"{error}: Run `pip install zarr[jupyter]` or `conda install ipytree`"
+            f"{e}: Run `pip install zarr[jupyter]` or `conda install ipytree`"
             f"to get the required ipytree dependency for displaying the tree "
            f"widget. If using jupyterlab<3, you also need to run "
             f"`jupyter labextension install ipytree`"
-        )
+        ) from e

     result = ipytree.Tree()
     root = TreeNode(group, level=level)