diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 6598f91..a739691 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -1,10 +1,12 @@
 name: Lint
+permissions:
+  contents: read
 
 on:
   pull_request:
 
 jobs:
-  lint:
+  flake8:
     runs-on: ubuntu-latest
 
     steps:
@@ -15,4 +17,26 @@ jobs:
       - uses: TrueBrain/actions-flake8@v2
         with:
           flake8_version: 6.0.0
-          plugins: flake8-isort==6.1.1 flake8-quotes==3.4.0
+          plugins: flake8-isort==6.1.1 flake8-quotes==3.4.0 flake8-commas==4.0.0
+
+  mypy:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: 3.9
+
+      - run: |
+          pip install poetry
+          poetry install --with=mypy -E all
+
+      - run: |
+          poetry run mypy \
+            --check-untyped-defs \
+            --disable-error-code=import-untyped \
+            --strict-equality \
+            --warn-redundant-casts \
+            --warn-unused-ignores \
+            mandible
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 7436549..f3ab535 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,4 +1,6 @@
 name: Test
+permissions:
+  contents: read
 
 on:
   push:
diff --git a/mandible/metadata_mapper/builder.py b/mandible/metadata_mapper/builder.py
index 90a6d58..509b645 100644
--- a/mandible/metadata_mapper/builder.py
+++ b/mandible/metadata_mapper/builder.py
@@ -103,7 +103,7 @@ def mapped(
     directive_name = Mapped.directive_name
     assert directive_name is not None
 
-    params = {
+    params: dict[str, Any] = {
         "source": source,
         "key": key,
     }
diff --git a/mandible/metadata_mapper/directive/reformatted.py b/mandible/metadata_mapper/directive/reformatted.py
index 593b9f2..49cd7f0 100644
--- a/mandible/metadata_mapper/directive/reformatted.py
+++ b/mandible/metadata_mapper/directive/reformatted.py
@@ -4,8 +4,9 @@
 
 from mandible.metadata_mapper.exception import MetadataMapperError
 from mandible.metadata_mapper.format import FORMAT_REGISTRY
+from mandible.metadata_mapper.types import Key
 
-from .directive import Key, TemplateDirective, get_key
+from .directive import TemplateDirective, get_key
 
 
 @dataclass
diff --git a/mandible/metadata_mapper/exception.py b/mandible/metadata_mapper/exception.py
index f4d9ccc..5878b35 100644
--- a/mandible/metadata_mapper/exception.py
+++ b/mandible/metadata_mapper/exception.py
@@ -1,3 +1,6 @@
+from typing import Optional
+
+
 class MetadataMapperError(Exception):
     """A generic error raised by the MetadataMapper"""
 
@@ -8,7 +11,7 @@ def __init__(self, msg: str):
 class TemplateError(MetadataMapperError):
     """An error that occurred while processing the metadata template."""
 
-    def __init__(self, msg: str, debug_path: str = None):
+    def __init__(self, msg: str, debug_path: Optional[str] = None):
         super().__init__(msg)
         self.debug_path = debug_path
 
@@ -26,7 +29,7 @@ class ContextValueError(MetadataMapperError):
     def __init__(
         self,
         msg: str,
-        source_name: str = None,
+        source_name: Optional[str] = None,
     ):
         super().__init__(msg)
         self.source_name = source_name
diff --git a/mandible/metadata_mapper/format/__init__.py b/mandible/metadata_mapper/format/__init__.py
index b57a46c..0f3899d 100644
--- a/mandible/metadata_mapper/format/__init__.py
+++ b/mandible/metadata_mapper/format/__init__.py
@@ -11,12 +11,12 @@
 try:
     from .h5 import H5
 except ImportError:
-    from .format import H5
+    from .format import H5  # type: ignore
 
 try:
     from .xml import Xml
 except ImportError:
-    from .format import Xml
+    from .format import Xml  # type: ignore
 
 
 __all__ = (
diff --git a/mandible/metadata_mapper/format/format.py b/mandible/metadata_mapper/format/format.py
index cda2cb1..168853d 100644
--- a/mandible/metadata_mapper/format/format.py
+++ b/mandible/metadata_mapper/format/format.py
@@ -1,11 +1,12 @@
 import contextlib
+import inspect
 import json
 import re
 import zipfile
 from abc import ABC, abstractmethod
-from collections.abc import Iterable
+from collections.abc import Generator, Iterable
 from dataclasses import dataclass
-from typing import IO, Any, TypeVar
+from typing import IO, Any, Generic, TypeVar
 
 from mandible import jsonpath
 from mandible.metadata_mapper.key import RAISE_EXCEPTION, Key
@@ -50,7 +51,7 @@ def get_value(self, file: IO[bytes], key: Key) -> Any:
 
 
 @dataclass
-class FileFormat(Format, ABC, register=False):
+class FileFormat(Format, Generic[T], ABC, register=False):
     """A Format for querying files from a standard data file.
 
     Simple, single format data types such as 'json' that can be queried
@@ -76,7 +77,7 @@ def get_value(self, file: IO[bytes], key: Key) -> Any:
         with self.parse_data(file) as data:
             return self._eval_key_wrapper(data, key)
 
-    def _eval_key_wrapper(self, data, key: Key) -> Any:
+    def _eval_key_wrapper(self, data: T, key: Key) -> Any:
         try:
             return self.eval_key(data, key)
         except KeyError as e:
@@ -116,7 +117,7 @@ def eval_key(data: T, key: Key) -> Any:
 
 
 @dataclass
-class _PlaceholderBase(FileFormat, register=False):
+class _PlaceholderBase(FileFormat[None], register=False):
     """
     Base class for defining placeholder implementations for classes that
    require extra dependencies to be installed
@@ -124,16 +125,18 @@ class _PlaceholderBase(FileFormat, register=False):
     def __init__(self, dep: str):
         raise Exception(
             f"{dep} must be installed to use the {self.__class__.__name__} "
-            "format class"
+            "format class",
         )
 
     @staticmethod
-    def parse_data(file: IO[bytes]) -> contextlib.AbstractContextManager[T]:
-        pass
+    def parse_data(file: IO[bytes]) -> contextlib.AbstractContextManager[None]:
+        # __init__ always raises
+        raise RuntimeError("Unreachable!")
 
     @staticmethod
-    def eval_key(data: T, key: Key):
-        pass
+    def eval_key(data: None, key: Key):
+        # __init__ always raises
+        raise RuntimeError("Unreachable!")
 
 
 @dataclass
@@ -151,10 +154,10 @@ def __init__(self):
 
 # Define formats that don't require extra dependencies
 @dataclass
-class Json(FileFormat):
+class Json(FileFormat[dict]):
     @staticmethod
     @contextlib.contextmanager
-    def parse_data(file: IO[bytes]) -> dict:
+    def parse_data(file: IO[bytes]) -> Generator[dict]:
         yield json.load(file)
 
     @staticmethod
@@ -237,20 +240,26 @@ def _matches_filters(self, zipinfo: zipfile.ZipInfo) -> bool:
 
         return True
 
 
+ZIP_INFO_ATTRS = [
+    name
+    for name, _ in inspect.getmembers(zipfile.ZipInfo, inspect.isdatadescriptor)
+    if not name.startswith("_")
+]
+
+
 @dataclass
-class ZipInfo(FileFormat):
+class ZipInfo(FileFormat[dict]):
     """Query Zip headers and directory information."""
 
     @staticmethod
     @contextlib.contextmanager
-    def parse_data(file: IO[bytes]) -> dict:
+    def parse_data(file: IO[bytes]) -> Generator[dict]:
         with zipfile.ZipFile(file, "r") as zf:
             yield {
                 "infolist": [
                     {
                         k: getattr(info, k)
-                        for k in info.__slots__
-                        if not k.startswith("_")
+                        for k in ZIP_INFO_ATTRS
                     }
                     for info in zf.infolist()
                 ],
diff --git a/mandible/metadata_mapper/format/h5.py b/mandible/metadata_mapper/format/h5.py
index ac2365b..6ebce94 100644
--- a/mandible/metadata_mapper/format/h5.py
+++ b/mandible/metadata_mapper/format/h5.py
@@ -11,13 +11,13 @@
 
 
 @dataclass
-class H5(FileFormat):
+class H5(FileFormat[Any]):
     @staticmethod
     def parse_data(file: IO[bytes]) -> contextlib.AbstractContextManager[Any]:
         return h5py.File(file, "r")
 
     @staticmethod
-    def eval_key(data, key: Key) -> Any:
+    def eval_key(data: Any, key: Key) -> Any:
         return normalize(data[key.key][()])
 
 
diff --git a/mandible/metadata_mapper/format/xml.py b/mandible/metadata_mapper/format/xml.py
index 1dbee9f..3e09af2 100644
--- a/mandible/metadata_mapper/format/xml.py
+++ b/mandible/metadata_mapper/format/xml.py
@@ -1,6 +1,7 @@
 import contextlib
+from collections.abc import Generator, Iterable
 from dataclasses import dataclass
-from typing import IO, Any
+from typing import IO, Any, Union
 
 from lxml import etree
 
@@ -10,16 +11,34 @@
 
 
 @dataclass
-class Xml(FileFormat):
+class Xml(FileFormat[etree._ElementTree]):
     @staticmethod
     @contextlib.contextmanager
-    def parse_data(file: IO[bytes]) -> Any:
+    def parse_data(file: IO[bytes]) -> Generator[etree._ElementTree]:
         yield etree.parse(file)
 
     @staticmethod
-    def eval_key(data: etree.ElementTree, key: Key) -> Any:
+    def eval_key(data: etree._ElementTree, key: Key) -> Any:
         nsmap = data.getroot().nsmap
-        elements = data.xpath(key.key, namespaces=nsmap)
-        values = [element.text for element in elements]
+        xpath_result = data.xpath(
+            key.key,
+            # Lxml type stubs don't handle None key for default namespaces
+            namespaces=nsmap,  # type: ignore
+        )
+        if isinstance(xpath_result, Iterable):
+            values = [convert_result(item) for item in xpath_result]
 
-        return key.resolve_list_match(values)
+            return key.resolve_list_match(values)
+
+        # Xpath supports functions such as `count` that can result in
+        # `data.xpath` returning something other than a list of matches.
+        return xpath_result
+
+
+def convert_result(result: Union[etree._Element, int, str, bytes, tuple]):
+    if isinstance(result, etree._Element):
+        return result.text
+    if isinstance(result, (int, str, bytes)):
+        return result
+
+    raise TypeError(f"Unsupported type {repr(result.__class__.__name__)}")
diff --git a/mandible/metadata_mapper/mapper.py b/mandible/metadata_mapper/mapper.py
index 7f71d0e..d198d0a 100644
--- a/mandible/metadata_mapper/mapper.py
+++ b/mandible/metadata_mapper/mapper.py
@@ -16,7 +16,7 @@ class MetadataMapper:
     def __init__(
         self,
         template: Template,
-        source_provider: SourceProvider = None,
+        source_provider: Optional[SourceProvider] = None,
         *,
         directive_marker: str = "@",
     ):
@@ -48,7 +48,7 @@ def get_metadata(self, context: Context) -> Template:
                 raise
             except Exception as e:
                 raise MetadataMapperError(
-                    f"failed to cache source keys: {e}"
+                    f"failed to cache source keys: {e}",
                 ) from e
 
         for name, source in sources.items():
@@ -66,21 +66,22 @@ def get_metadata(self, context: Context) -> Template:
             raise
         except Exception as e:
             raise MetadataMapperError(
-                f"failed to evaluate template: {e}"
+                f"failed to evaluate template: {e}",
             ) from e
 
     def _prepare_directives(self, context: Context, sources: dict[str, Source]):
         for value, debug_path in _walk_values(self.template):
             if isinstance(value, dict):
-                directive_name = self._get_directive_name(value, debug_path)
-                if directive_name is None:
+                directive_config = self._get_directive_name(value, debug_path)
+                if directive_config is None:
                     continue
 
+                directive_name, directive_body = directive_config
                 directive = self._get_directive(
                     directive_name,
                     context,
                     sources,
-                    value[directive_name],
+                    directive_body,
                     f"{debug_path}.{directive_name}",
                 )
                 directive.prepare()
@@ -91,13 +92,14 @@ def _replace_template(
         template: Template,
         sources: dict[str, Source],
         debug_path: str = "$",
-    ):
+    ) -> Template:
         if isinstance(template, dict):
-            directive_name = self._get_directive_name(
+            directive_config = self._get_directive_name(
                 template,
-                debug_path
+                debug_path,
             )
-            if directive_name is not None:
+            if directive_config is not None:
+                directive_name, directive_body = directive_config
                 debug_path = f"{debug_path}.{directive_name}"
                 directive = self._get_directive(
                     directive_name,
@@ -110,15 +112,15 @@ def _replace_template(
                             sources,
                             debug_path=f"{debug_path}.{k}",
                         )
-                        for k, v in template[directive_name].items()
+                        for k, v in directive_body.items()
                     },
-                    debug_path
+                    debug_path,
                 )
                 try:
                     return directive.call()
                 except Exception as e:
                     raise MetadataMapperError(
-                        f"failed to call directive at {debug_path}: {e}"
+                        f"failed to call directive at {debug_path}: {e}",
                     ) from e
 
         return {
@@ -146,38 +148,48 @@ def _replace_template(
 
     def _get_directive_name(
         self,
-        value: dict,
+        value: dict[str, Template],
         debug_path: str,
-    ) -> Optional[str]:
-        directive_names = [
-            key for key in value
-            if key.startswith(self.directive_marker)
+    ) -> Optional[tuple[str, dict[str, Template]]]:
+        directive_configs = [
+            (k, v)
+            for (k, v) in value.items()
+            if k.startswith(self.directive_marker)
         ]
-        if not directive_names:
+        if not directive_configs:
             return None
 
-        if len(directive_names) > 1:
+        if len(directive_configs) > 1:
             raise TemplateError(
                 "multiple directives found in config: "
-                f"{', '.join(repr(d) for d in directive_names)}",
-                debug_path
+                f"{', '.join(repr(k) for k, v in directive_configs)}",
+                debug_path,
+            )
+
+        directive_name, directive_config = directive_configs[0]
+
+        if not isinstance(directive_config, dict):
+            raise TemplateError(
+                "directive body should be type 'dict' not "
+                f"{repr(directive_config.__class__.__name__)}",
+                f"{debug_path}.{directive_name}",
             )
 
-        return directive_names[0]
+        return directive_name, directive_config
 
     def _get_directive(
         self,
         directive_name: str,
         context: Context,
         sources: dict[str, Source],
-        config: dict,
+        config: dict[str, Template],
         debug_path: str,
     ) -> TemplateDirective:
         cls = DIRECTIVE_REGISTRY.get(directive_name[len(self.directive_marker):])
         if cls is None:
             raise TemplateError(
                 f"invalid directive {repr(directive_name)}",
-                debug_path
+                debug_path,
             )
 
         argspec = inspect.getfullargspec(cls.__init__)
@@ -186,7 +198,7 @@ def _get_directive(
         required_keys = set(
             argspec.args[3:-len(argspec.defaults)]
             if argspec.defaults else
-            argspec.args[3:]
+            argspec.args[3:],
         )
         config_keys = set(config.keys())
         diff = required_keys - config_keys
@@ -198,7 +210,7 @@ def _get_directive(
             raise TemplateError(
                 f"missing key{s}: "
                 f"{', '.join(repr(d) for d in sorted(diff))}",
-                debug_path
+                debug_path,
             )
 
         # For forward compatibility, ignore any unexpected keys
diff --git a/mandible/metadata_mapper/source.py b/mandible/metadata_mapper/source.py
index f702996..acbe500 100644
--- a/mandible/metadata_mapper/source.py
+++ b/mandible/metadata_mapper/source.py
@@ -55,6 +55,6 @@ def query_all_values(self, context: Context):
             "%s: using keys %r, got new values %r",
             self,
             keys,
-            new_values
+            new_values,
         )
         self._values.update(new_values)
diff --git a/mandible/metadata_mapper/source_provider.py b/mandible/metadata_mapper/source_provider.py
index a32accf..cb76c0d 100644
--- a/mandible/metadata_mapper/source_provider.py
+++ b/mandible/metadata_mapper/source_provider.py
@@ -11,7 +11,7 @@
 
 T = TypeVar("T")
 
-REGISTRY_TYPE_MAP = {
+REGISTRY_TYPE_MAP: dict[str, dict[str, Any]] = {
     "Format": FORMAT_REGISTRY,
     "Source": SOURCE_REGISTRY,
     "Storage": STORAGE_REGISTRY,
@@ -72,7 +72,7 @@ def _create_object(
         cls_name = config.get("class")
         if cls_name is None:
             raise SourceProviderError(
-                f"missing key 'class' in config {config}"
+                f"missing key 'class' in config {config}",
             )
 
         # TODO(reweeden): As of python3.10, inspect.get_annotations(parent_cls)
diff --git a/mandible/metadata_mapper/storage/__init__.py b/mandible/metadata_mapper/storage/__init__.py
index 298866a..cee45fe 100644
--- a/mandible/metadata_mapper/storage/__init__.py
+++ b/mandible/metadata_mapper/storage/__init__.py
@@ -11,12 +11,12 @@
 try:
     from .cmr_query import CmrQuery
 except ImportError:
-    from .storage import CmrQuery
+    from .storage import CmrQuery  # type: ignore
 
 try:
     from .http_request import HttpRequest
 except ImportError:
-    from .storage import HttpRequest
+    from .storage import HttpRequest  # type: ignore
 
 
 __all__ = (
diff --git a/mandible/metadata_mapper/storage/cmr_query.py b/mandible/metadata_mapper/storage/cmr_query.py
index 9f80605..d5d95bf 100644
--- a/mandible/metadata_mapper/storage/cmr_query.py
+++ b/mandible/metadata_mapper/storage/cmr_query.py
@@ -18,7 +18,7 @@ class CmrQuery(HttpRequest):
     format: str = ""
     token: Optional[str] = None
 
-    def __post_init__(self, url: str):
+    def __post_init__(self, url: Optional[str]):
         if url:
             raise ValueError(
                 "do not set 'url' directly, use 'base_url' and 'path' instead",
diff --git a/mandible/metadata_mapper/storage/storage.py b/mandible/metadata_mapper/storage/storage.py
index a6854ae..1ea69e4 100644
--- a/mandible/metadata_mapper/storage/storage.py
+++ b/mandible/metadata_mapper/storage/storage.py
@@ -43,11 +43,12 @@ class _PlaceholderBase(Storage, register=False):
     def __init__(self, dep: str):
         raise Exception(
             f"{dep} must be installed to use the {self.__class__.__name__} "
-            "format class"
+            "format class",
         )
 
     def open_file(self, context: Context) -> IO[bytes]:
-        pass
+        # __init__ always raises
+        raise RuntimeError("Unreachable!")
 
 
 @dataclass
diff --git a/mandible/umm_classes/base.py b/mandible/umm_classes/base.py
index 235b2da..0e7fd34 100644
--- a/mandible/umm_classes/base.py
+++ b/mandible/umm_classes/base.py
@@ -111,7 +111,7 @@ def get_umm_date_time_format(self) -> str:
     def related_urls_files_sort_key(self, file: CMAGranuleFile) -> tuple:
         type_order = self.get_related_url_type_order()
         try:
-            type_ordinal = type_order.index(file.get("type"))
+            type_ordinal = type_order.index(file.get("type", "data"))
         except ValueError:
             type_ordinal = len(type_order)
 
diff --git a/pyproject.toml b/pyproject.toml
index d3fe048..b788cec 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -42,6 +42,11 @@
 pytest = "^8.0.2"
 pytest-cov = "^4.0.0"
 pytest-mock = "^3.8.2"
+
+[tool.poetry.group.mypy.dependencies]
+lxml-stubs = "^0.5.1"
+mypy = "^1.14.1"
+types-requests = "<2.31.0.7"
 
 [tool.pytest.ini_options]
 markers = [
diff --git a/tests/integration_tests/test_metadata_mapper.py b/tests/integration_tests/test_metadata_mapper.py
index ca01498..94521a5 100644
--- a/tests/integration_tests/test_metadata_mapper.py
+++ b/tests/integration_tests/test_metadata_mapper.py
@@ -287,6 +287,26 @@ def test_invalid_directive(context):
         mapper.get_metadata(context)
 
 
+def test_invalid_directive_config_type(context):
+    mapper = MetadataMapper(
+        template={
+            "foo": {
+                "@mapped": 100,
+            },
+        },
+        source_provider=ConfigSourceProvider({}),
+    )
+
+    with pytest.raises(
+        MetadataMapperError,
+        match=(
+            r"failed to process template at \$\.foo\.@mapped: "
+            "directive body should be type 'dict' not 'int'"
+        ),
+    ):
+        mapper.get_metadata(context)
+
+
 def test_multiple_directives(context):
     mapper = MetadataMapper(
         template={
diff --git a/tests/test_format.py b/tests/test_format.py
index 58a6db4..1e69d31 100644
--- a/tests/test_format.py
+++ b/tests/test_format.py
@@ -310,6 +310,7 @@ def test_xml():
             Key("./nested/qux"),
             Key("./list/v", return_list=True),
             Key("./list/v", return_first=True),
+            Key("count(./list/v)"),
         ],
     ) == {
         Key("/root/foo"): "foo value",
@@ -318,6 +319,7 @@ def test_xml():
         Key("./nested/qux"): "qux nested value",
         Key("./list/v", return_list=True): ["list", "value"],
         Key("./list/v", return_first=True): "list",
+        Key("count(./list/v)"): 2,
     }
 
 
diff --git a/tests/test_source_provider.py b/tests/test_source_provider.py
index 888639d..5b3e404 100644
--- a/tests/test_source_provider.py
+++ b/tests/test_source_provider.py
@@ -56,23 +56,23 @@ def test_config_source_provider(sources):
             "storage": {
                 "class": "LocalFile",
                 "filters": {
-                    "name": "foo"
-                }
+                    "name": "foo",
+                },
             },
             "format": {
-                "class": "Json"
-            }
+                "class": "Json",
+            },
         },
         "bar": {
             "storage": {
                 "class": "LocalFile",
                 "filters": {
-                    "name": "bar"
-                }
+                    "name": "bar",
+                },
             },
             "format": {
-                "class": "Json"
-            }
+                "class": "Json",
+            },
         },
         "baz": {
             "storage": {
@@ -145,35 +145,35 @@ def test_config_source_provider_all_formats():
             "storage": {
                 "class": "LocalFile",
                 "filters": {
-                    "name": "foo"
-                }
+                    "name": "foo",
+                },
             },
             "format": {
-                "class": "Json"
-            }
+                "class": "Json",
+            },
         },
         "xml": {
             "storage": {
                 "class": "LocalFile",
                 "filters": {
-                    "name": "bar"
-                }
+                    "name": "bar",
+                },
             },
             "format": {
-                "class": "Xml"
-            }
+                "class": "Xml",
+            },
         },
         "h5": {
             "storage": {
                 "class": "LocalFile",
                 "filters": {
-                    "name": "baz"
-                }
+                    "name": "baz",
+                },
             },
             "format": {
-                "class": "H5"
-            }
-        }
+                "class": "H5",
+            },
+        },
     })
 
     assert provider.get_sources() == {
@@ -237,9 +237,9 @@ def test_config_source_provider_missing_storage():
     provider = ConfigSourceProvider({
         "source": {
             "format": {
-                "class": "Json"
-            }
-        }
+                "class": "Json",
+            },
+        },
     })
 
     with pytest.raises(
@@ -256,9 +256,9 @@ def test_config_source_provider_invalid_storage():
     provider = ConfigSourceProvider({
         "source": {
             "storage": {
-                "class": "NotARealStorage"
-            }
-        }
+                "class": "NotARealStorage",
+            },
+        },
     })
 
     with pytest.raises(
@@ -266,7 +266,7 @@ def test_config_source_provider_invalid_storage():
         match=(
             "failed to create source 'source': "
             "invalid storage type 'NotARealStorage'"
-        )
+        ),
     ):
         provider.get_sources()
 
@@ -277,9 +277,9 @@ def test_config_source_provider_invalid_storage_kwargs(cls_name):
         "source": {
             "storage": {
                 "class": cls_name,
-                "invalid_arg": 1
-            }
-        }
+                "invalid_arg": 1,
+            },
+        },
     })
 
     with pytest.raises(
@@ -288,7 +288,7 @@ def test_config_source_provider_invalid_storage_kwargs(cls_name):
             "failed to create source 'source': "
             rf"({cls_name}\.)?__init__\(\) got an unexpected keyword argument "
             "'invalid_arg'"
-        )
+        ),
     ):
         provider.get_sources()
 
@@ -297,9 +297,9 @@ def test_config_source_provider_missing_format():
     provider = ConfigSourceProvider({
         "source": {
             "storage": {
-                "class": "S3File"
-            }
-        }
+                "class": "S3File",
+            },
+        },
     })
 
     with pytest.raises(
@@ -316,12 +316,12 @@ def test_config_source_provider_invalid_format():
     provider = ConfigSourceProvider({
         "source": {
             "storage": {
-                "class": "S3File"
+                "class": "S3File",
             },
             "format": {
-                "class": "NotARealFormat"
-            }
-        }
+                "class": "NotARealFormat",
+            },
+        },
     })
 
     with pytest.raises(
@@ -329,7 +329,7 @@ def test_config_source_provider_invalid_format():
         match=(
             "failed to create source 'source': "
             "invalid format type 'NotARealFormat'"
-        )
+        ),
     ):
         provider.get_sources()
 
@@ -339,7 +339,7 @@ def test_config_source_provider_invalid_format_kwargs(cls_name):
     provider = ConfigSourceProvider({
         "source": {
             "storage": {
-                "class": "S3File"
+                "class": "S3File",
             },
             "format": {
                 "class": cls_name,
@@ -354,6 +354,6 @@ def test_config_source_provider_invalid_format_kwargs(cls_name):
             "failed to create source 'source': "
             rf"({cls_name}\.)?__init__\(\) got an unexpected keyword argument "
             "'invalid_arg'"
-        )
+        ),
     ):
         provider.get_sources()
diff --git a/tests/test_storage.py b/tests/test_storage.py
index ff6b0e6..7e5baa9 100644
--- a/tests/test_storage.py
+++ b/tests/test_storage.py
@@ -42,8 +42,8 @@ def test_local_file(data_path):
     context = Context(
         files=[{
            "name": "local_file",
-            "path": str(data_path / "local_file.txt")
-        }]
+            "path": str(data_path / "local_file.txt"),
+        }],
     )
 
     storage = LocalFile(filters={"name": "local_file"})
@@ -55,8 +55,8 @@ def test_local_file_name_match(data_path):
     context = Context(
         files=[{
             "name": "local_file",
-            "path": str(data_path / "local_file.txt")
-        }]
+            "path": str(data_path / "local_file.txt"),
+        }],
     )
 
     storage = LocalFile(filters={"name": "local_.*"})
@@ -68,8 +68,8 @@ def test_local_file_int_filter(data_path):
     context = Context(
         files=[{
             "type": 0,
-            "path": str(data_path / "local_file.txt")
-        }]
+            "path": str(data_path / "local_file.txt"),
+        }],
     )
 
     storage = LocalFile(filters={"type": 0})
@@ -84,7 +84,7 @@ def test_local_file_creation():
 
 def test_local_file_name_match_error():
     context = Context(
-        files=[{"name": "local_file"}]
+        files=[{"name": "local_file"}],
     )
     storage = LocalFile(filters={"name": "foo.*"})
 
@@ -110,8 +110,8 @@ def test_s3_file_s3uri(s3_resource):
         files=[{
             "name": "s3_file",
             "bucket": "test-bucket",
-            "key": "bucket_file.txt"
-        }]
+            "key": "bucket_file.txt",
+        }],
     )
 
     storage = S3File(filters={"name": "s3_file"})
@@ -129,8 +129,8 @@ def test_s3_file_s3fs_kwargs(s3_resource):
         files=[{
             "name": "s3_file",
             "bucket": "test-bucket",
-            "key": "bucket_file.txt"
-        }]
+            "key": "bucket_file.txt",
+        }],
     )
     storage = S3File(
         filters={"name": "s3_file"},
@@ -161,24 +161,24 @@ def create_file(bucket, name, contents=None, type="data"):
             "type": type,
             "uri": f"https://example.asf.alaska.edu/{name}",
             "bucket": bucket.name,
-            "key": name
+            "key": name,
         }
 
     context = Context(
         files=[
             create_file(bucket, "file1.txt"),
             create_file(bucket, "file2.txt", type="metadata"),
-        ]
+        ],
     )
 
     storage = S3File(filters={
-        "name": "file1.txt"
+        "name": "file1.txt",
     })
     with storage.open_file(context) as f:
         assert f.read() == b"Content from file1.txt\n"
 
     storage = S3File(filters={
-        "type": "metadata"
+        "type": "metadata",
     })
     with storage.open_file(context) as f:
         assert f.read() == b"Content from file2.txt\n"