Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Apply assorted Repo-Review suggestions #1705

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,15 @@ updates:
directory: "/"
schedule:
interval: "daily"
groups:
actions:
patterns:
- "*"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
groups:
actions:
patterns:
- "*"
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ repos:
rev: 'v0.4.3'
hooks:
- id: ruff
- repo: https://github.com/psf/black
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.4.2
hooks:
- id: black
Expand Down
11 changes: 11 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -128,15 +128,26 @@ ignore_missing_imports = true
warn_unused_configs = true
warn_redundant_casts = true
warn_unused_ignores = true
enable_error_code = [
"ignore-without-code",
"redundant-expr",
"truthy-bool",
]

[tool.pytest.ini_options]
minversion = "7"
log_cli_level = "INFO"
# xfail_strict = true # FIXME?
doctest_optionflags = [
"NORMALIZE_WHITESPACE",
"ELLIPSIS",
"IGNORE_EXCEPTION_DETAIL",
]
addopts = [
"--durations=10",
"-ra",
"--strict-config",
"--strict-markers",
]
filterwarnings = [
"error:::zarr.*",
Expand Down
12 changes: 6 additions & 6 deletions zarr/_storage/store.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ def _valid_key(self, key: str) -> bool:
0-9, or in the set /.-_ it will return True if that's the case, False
otherwise.
"""
if not isinstance(key, str) or not key.isascii():
if not isinstance(key, str) or not key.isascii(): # type: ignore[redundant-expr]
return False
if set(key) - self._valid_key_characters:
return False
Expand Down Expand Up @@ -627,11 +627,11 @@ def _rename_from_keys(store: BaseStore, src_path: str, dst_path: str) -> None:
for root_prefix in [meta_root, data_root]:
_src_prefix = root_prefix + src_prefix
_dst_prefix = root_prefix + dst_prefix
for key in store.list_prefix(_src_prefix): # type: ignore
for key in store.list_prefix(_src_prefix): # type: ignore[attr-defined]
new_key = _dst_prefix + key[len(_src_prefix) :]
store[new_key] = store.pop(key)
any_renamed = True
any_meta_renamed = _rename_metadata_v3(store, src_path, dst_path) # type: ignore
any_meta_renamed = _rename_metadata_v3(store, src_path, dst_path) # type: ignore[arg-type]
any_renamed = any_meta_renamed or any_renamed

if not any_renamed:
Expand Down Expand Up @@ -680,7 +680,7 @@ def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str

def _prefix_to_array_key(store: StoreLike, prefix: str) -> str:
if getattr(store, "_store_version", 2) == 3:
sfx = _get_metadata_suffix(store) # type: ignore
sfx = _get_metadata_suffix(store) # type: ignore[arg-type]
if prefix:
key = meta_root + prefix.rstrip("/") + ".array" + sfx
else:
Expand All @@ -692,7 +692,7 @@ def _prefix_to_array_key(store: StoreLike, prefix: str) -> str:

def _prefix_to_group_key(store: StoreLike, prefix: str) -> str:
if getattr(store, "_store_version", 2) == 3:
sfx = _get_metadata_suffix(store) # type: ignore
sfx = _get_metadata_suffix(store) # type: ignore[arg-type]
if prefix:
key = meta_root + prefix.rstrip("/") + ".group" + sfx
else:
Expand All @@ -705,7 +705,7 @@ def _prefix_to_group_key(store: StoreLike, prefix: str) -> str:
def _prefix_to_attrs_key(store: StoreLike, prefix: str) -> str:
if getattr(store, "_store_version", 2) == 3:
# for v3, attributes are stored in the array metadata
sfx = _get_metadata_suffix(store) # type: ignore
sfx = _get_metadata_suffix(store) # type: ignore[arg-type]
if prefix:
key = meta_root + prefix.rstrip("/") + ".array" + sfx
else:
Expand Down
2 changes: 1 addition & 1 deletion zarr/_storage/v3.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ class RmdirV3:

def rmdir(self, path: str = "") -> None:
path = normalize_storage_path(path)
_rmdir_from_keys_v3(self, path) # type: ignore
_rmdir_from_keys_v3(self, path) # type: ignore[arg-type]


class KVStoreV3(RmdirV3, KVStore, StoreV3):
Expand Down
2 changes: 1 addition & 1 deletion zarr/convenience.py
Original file line number Diff line number Diff line change
Expand Up @@ -1273,7 +1273,7 @@ def is_zarr_key(key):
else:
assert_zarr_v3_api_available()

sfx = _get_metadata_suffix(store) # type: ignore
sfx = _get_metadata_suffix(store) # type: ignore[arg-type]

def is_zarr_key(key):
return (
Expand Down
56 changes: 32 additions & 24 deletions zarr/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,11 +134,11 @@ def contains_group(store: StoreLike, path: Path = None, explicit_only=True) -> b
return True
# for v3, need to also handle implicit groups

sfx = _get_metadata_suffix(store) # type: ignore
sfx = _get_metadata_suffix(store) # type: ignore[arg-type]
implicit_prefix = key.replace(".group" + sfx, "")
if not implicit_prefix.endswith("/"):
implicit_prefix += "/"
if store.list_prefix(implicit_prefix): # type: ignore
if store.list_prefix(implicit_prefix): # type: ignore[union-attr]
return True
return False

Expand Down Expand Up @@ -207,15 +207,15 @@ def rmdir(store: StoreLike, path: Path = None):
`Store` interface."""
path = normalize_storage_path(path)
store_version = getattr(store, "_store_version", 2)
if hasattr(store, "rmdir") and store.is_erasable(): # type: ignore
if hasattr(store, "rmdir") and store.is_erasable(): # type: ignore[attr-defined]
# pass through
store.rmdir(path)
else:
# slow version, delete one key at a time
if store_version == 2:
_rmdir_from_keys(store, path)
else:
_rmdir_from_keys_v3(store, path) # type: ignore
_rmdir_from_keys_v3(store, path) # type: ignore[arg-type]


def rename(store: Store, src_path: Path, dst_path: Path):
Expand Down Expand Up @@ -262,11 +262,11 @@ def _getsize(store: BaseStore, path: Path = None) -> int:
if store_version == 3:
if path == "":
# have to list the root folders without trailing / in this case
members = store.list_prefix(data_root.rstrip("/")) # type: ignore
members += store.list_prefix(meta_root.rstrip("/")) # type: ignore
members = store.list_prefix(data_root.rstrip("/")) # type: ignore[attr-defined]
members += store.list_prefix(meta_root.rstrip("/")) # type: ignore[attr-defined]
else:
members = store.list_prefix(data_root + path) # type: ignore
members += store.list_prefix(meta_root + path) # type: ignore
members = store.list_prefix(data_root + path) # type: ignore[attr-defined]
members += store.list_prefix(meta_root + path) # type: ignore[attr-defined]
# also include zarr.json?
# members += ['zarr.json']
else:
Expand Down Expand Up @@ -447,7 +447,11 @@ def init_array(

if store_version == 3 and "zarr.json" not in store:
# initialize with default zarr.json entry level metadata
store["zarr.json"] = store._metadata_class.encode_hierarchy_metadata(None) # type: ignore
# fmt: off
store["zarr.json"] = (
store._metadata_class.encode_hierarchy_metadata(None) # type: ignore[union-attr]
)
# fmt: on

if not compressor:
# compatibility with legacy tests using compressor=[]
Expand Down Expand Up @@ -504,20 +508,20 @@ def _init_array_metadata(

# attempt to delete any pre-existing array in store
if array_meta_key in store:
store.erase(array_meta_key) # type: ignore
store.erase(array_meta_key) # type: ignore[union-attr]
if group_meta_key in store:
store.erase(group_meta_key) # type: ignore
store.erase_prefix(data_prefix) # type: ignore
store.erase(group_meta_key) # type: ignore[union-attr]
store.erase_prefix(data_prefix) # type: ignore[union-attr]
if chunk_store is not None:
chunk_store.erase_prefix(data_prefix) # type: ignore
chunk_store.erase_prefix(data_prefix) # type: ignore[attr-defined]

if "/" in path:
# path is a subfolder of an existing array, remove that array
parent_path = "/".join(path.split("/")[:-1])
sfx = _get_metadata_suffix(store) # type: ignore
sfx = _get_metadata_suffix(store) # type: ignore[arg-type]
array_key = meta_root + parent_path + ".array" + sfx
if array_key in store:
store.erase(array_key) # type: ignore
store.erase(array_key) # type: ignore[union-attr]

if not overwrite:
if contains_array(store, path):
Expand Down Expand Up @@ -601,7 +605,7 @@ def _init_array_metadata(

# use null to indicate no filters
if not filters_config:
filters_config = None # type: ignore
filters_config = None # type: ignore[assignment]

# initialize metadata
# TODO: don't store redundant dimension_separator for v3?
Expand Down Expand Up @@ -676,7 +680,11 @@ def init_group(

if store_version == 3 and "zarr.json" not in store:
# initialize with default zarr.json entry level metadata
store["zarr.json"] = store._metadata_class.encode_hierarchy_metadata(None) # type: ignore
# fmt: off
store["zarr.json"] = (
store._metadata_class.encode_hierarchy_metadata(None) # type: ignore[union-attr]
)
# fmt: on

# initialise metadata
_init_group_metadata(store=store, overwrite=overwrite, path=path, chunk_store=chunk_store)
Expand Down Expand Up @@ -712,13 +720,13 @@ def _init_group_metadata(

# attempt to delete any pre-existing array in store
if array_meta_key in store:
store.erase(array_meta_key) # type: ignore
store.erase(array_meta_key) # type: ignore[union-attr]
if group_meta_key in store:
store.erase(group_meta_key) # type: ignore
store.erase_prefix(data_prefix) # type: ignore
store.erase_prefix(meta_prefix) # type: ignore
store.erase(group_meta_key) # type: ignore[union-attr]
store.erase_prefix(data_prefix) # type: ignore[union-attr]
store.erase_prefix(meta_prefix) # type: ignore[union-attr]
if chunk_store is not None:
chunk_store.erase_prefix(data_prefix) # type: ignore
chunk_store.erase_prefix(data_prefix) # type: ignore[attr-defined]

if not overwrite:
if contains_array(store, path):
Expand All @@ -735,7 +743,7 @@ def _init_group_metadata(
# N.B., currently no metadata properties are needed, however there may
# be in future
if store_version == 3:
meta = {"attributes": {}} # type: ignore
meta = {"attributes": {}} # type: ignore[var-annotated]
else:
meta = {}
key = _prefix_to_group_key(store, _path_to_prefix(path))
Expand Down Expand Up @@ -2548,7 +2556,7 @@ def _cache_value(self, key: Path, value):
value_size = buffer_size(value)
# check size of the value against max size, as if the value itself exceeds max
# size then we are never going to cache it
if self._max_size is None or value_size <= self._max_size:
if self._max_size is None or value_size <= self._max_size: # type: ignore[redundant-expr]
Copy link
Contributor Author

@DimitriPapadopoulos DimitriPapadopoulos Mar 13, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

self._max_size is initialised here:

zarr-python/zarr/storage.py

Lines 2406 to 2408 in f58065b

def __init__(self, store: StoreLike, max_size: int):
self._store: BaseStore = BaseStore._ensure_store(store)
self._max_size = max_size

and here:

zarr-python/zarr/storage.py

Lines 2417 to 2420 in f58065b

def __getstate__(self):
return (
self._store,
self._max_size,

In the first case, `self._max_size` shouldn't be `None`, but the type system cannot prevent callers from passing `None` as the `max_size` parameter. In the second case, without type annotations, I don't understand why ruff considers the `self._max_size is None` check redundant.

self._accommodate_value(value_size)
self._values_cache[key] = value
self._current_size += value_size
Expand Down
Loading