From 01381efb5b39dd96b0ec86ba705e2a98d510ab65 Mon Sep 17 00:00:00 2001
From: Cameron Showalter
Date: Tue, 11 Jul 2023 09:24:47 -0800
Subject: [PATCH 01/40] Moved logging up, so if version check fails, we can still log it
---
 asf_search/__init__.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/asf_search/__init__.py b/asf_search/__init__.py
index d6fab9d7..66d755ac 100644
--- a/asf_search/__init__.py
+++ b/asf_search/__init__.py
@@ -1,6 +1,12 @@
 # backport of importlib.metadata for python < 3.8
 from importlib_metadata import PackageNotFoundError, version
+
+## Setup logging now, so it's available if __version__ fails:
 import logging
+ASF_LOGGER = logging.getLogger(__name__)
+# Add null handler so we do nothing by default. It's up to whatever
+# imports us, if they want logging.
+ASF_LOGGER.addHandler(logging.NullHandler())

 try:
     __version__ = version(__name__)
@@ -14,11 +20,6 @@
     ASF_LOGGER.exception(msg)
     raise PackageNotFoundError("Install with 'python3 -m pip install -e .' to use") from e

-ASF_LOGGER = logging.getLogger(__name__)
-# Add null handle so we do nothing by default. It's up to whatever
-# imports us, if they want logging.
-ASF_LOGGER.addHandler(logging.NullHandler())
-
 from .ASFSession import ASFSession
 from .ASFProduct import ASFProduct
 from .ASFSearchResults import ASFSearchResults

From 8816940c0a15cdbb5f1ae2f24bdacd011880d238 Mon Sep 17 00:00:00 2001
From: Cameron Showalter
Date: Tue, 11 Jul 2023 11:54:13 -0800
Subject: [PATCH 02/40] Moved logging up, to log when setup doesn't work correctly
---
 asf_search/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/asf_search/__init__.py b/asf_search/__init__.py
index 66d755ac..9b61ad30 100644
--- a/asf_search/__init__.py
+++ b/asf_search/__init__.py
@@ -23,7 +23,7 @@
 from .ASFSession import ASFSession
 from .ASFProduct import ASFProduct
 from .ASFSearchResults import ASFSearchResults
-from .ASFSearchOptions import ASFSearchOptions, validators
+from .ASFSearchOptions import ASFSearchOptions, validators, validator_map
 from .exceptions import *
 from .constants import *
 from .health import *

From 8ea19aee3beb3e857dc2d9313bc5abea6532770b Mon Sep 17 00:00:00 2001
From: Cameron Showalter
Date: Wed, 9 Aug 2023 11:12:23 -0800
Subject: [PATCH 03/40] update for using asf_search in SearchAPI
---
 asf_search/ASFSearchOptions/validators.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py
index b4b6f88f..d83ee105 100644
--- a/asf_search/ASFSearchOptions/validators.py
+++ b/asf_search/ASFSearchOptions/validators.py
@@ -21,7 +21,7 @@ def parse_string(value: str) -> str:
     except ValueError as exc: # If this happens, printing v's value would fail too...
         raise ValueError(f"Invalid string: Can't cast type '{type(value)}' to string.") from exc
     if len(value) == 0:
-        raise ValueError(f'Invalid string: Empty.')
+        raise ValueError('Invalid string: Empty.')
     return value

@@ -35,7 +35,7 @@ def parse_float(value: float) -> float:
         value = float(value)
     except ValueError as exc:
         raise ValueError(f'Invalid float: {value}') from exc
-    if math.isinf(value):
+    if math.isinf(value) or math.isnan(value):
         raise ValueError(f'Float values must be finite: got {value}')
     return value
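Taken together, PATCH 01-03 move the logger setup ahead of the version check so a failed version lookup can still be logged, while the NullHandler keeps the library silent by default. A minimal sketch of how a consuming application would opt in to asf_search's logs (handler choice and level here are illustrative, not part of these patches):

    import logging
    import asf_search  # importing attaches a NullHandler to the 'asf_search' logger

    log = logging.getLogger('asf_search')
    log.addHandler(logging.StreamHandler())  # route the library's logs to stderr
    log.setLevel(logging.INFO)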
From c91bb79b468a970f13abea0c9b8c2374498690d2 Mon Sep 17 00:00:00 2001
From: Cameron Showalter
Date: Thu, 10 Aug 2023 12:43:17 -0800
Subject: [PATCH 04/40] Adding in support for 'circle' keyword.
Exposed in ASFSearchOptions, and uses CMR's key to search for results
---
 asf_search/ASFSearchOptions/validator_map.py | 3 ++-
 asf_search/ASFSearchOptions/validators.py | 8 ++++++++
 asf_search/CMR/field_map.py | 1 +
 asf_search/CMR/subquery.py | 3 ++-
 asf_search/CMR/translate.py | 5 +++++
 asf_search/search/search_generator.py | 8 ++++----
 tests/yml_tests/test_ASFSearchOptions.yml | 6 ++++++
 tests/yml_tests/test_search.yml | 7 +++++++
 8 files changed, 35 insertions(+), 6 deletions(-)

diff --git a/asf_search/ASFSearchOptions/validator_map.py b/asf_search/ASFSearchOptions/validator_map.py
index 64162bc1..6ada5cd0 100644
--- a/asf_search/ASFSearchOptions/validator_map.py
+++ b/asf_search/ASFSearchOptions/validator_map.py
@@ -3,7 +3,7 @@ from .validators import (
     parse_string, parse_float, parse_wkt, parse_date,
     parse_string_list, parse_int_list, parse_int_or_range_list,
-    parse_float_or_range_list,
+    parse_float_or_range_list, parse_circle,
     parse_session
 )

@@ -32,6 +32,7 @@ def validate(key, value):
     'beamMode': parse_string_list,
     'beamSwath': parse_string_list,
     'campaign': parse_string,
+    'circle': parse_circle,
     'maxDoppler': parse_float,
     'minDoppler': parse_float,
     'maxFaradayRotation': parse_float,

diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py
index d83ee105..a9c8e87a 100644
--- a/asf_search/ASFSearchOptions/validators.py
+++ b/asf_search/ASFSearchOptions/validators.py
@@ -189,6 +189,14 @@ def parse_wkt(value: str) -> str:
         raise ValueError(f'Invalid wkt: {exc}') from exc
     return wkt.dumps(value)

+# Parse a CMR circle:
+# [longitude, latitude, radius(meters)]
+def parse_circle(value: List[float]) -> str:
+    value = parse_float_list(value)
+    if len(value) != 3:
+        raise ValueError(f'Invalid circle, must be 3 values (long, lat, radius). 
Got: {value}') + return value + # Take "requests.Session", or anything that subclasses it: def parse_session(session: Type[requests.Session]): if issubclass(type(session), requests.Session): diff --git a/asf_search/CMR/field_map.py b/asf_search/CMR/field_map.py index 561ede0a..49bdbdf2 100644 --- a/asf_search/CMR/field_map.py +++ b/asf_search/CMR/field_map.py @@ -9,6 +9,7 @@ 'beamMode': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE,{0}'}, 'beamSwath': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE_TYPE,{0}'}, 'campaign': {'key': 'attribute[]', 'fmt': 'string,MISSION_NAME,{0}'}, + 'circle': {'key': 'circle', 'fmt': '{0}'}, 'maxDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,,{0}'}, 'minDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,{0},'}, 'maxFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,,{0}'}, diff --git a/asf_search/CMR/subquery.py b/asf_search/CMR/subquery.py index f37a0ef0..84da799f 100644 --- a/asf_search/CMR/subquery.py +++ b/asf_search/CMR/subquery.py @@ -22,7 +22,7 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]: if params.get('product_list') is not None: params['product_list'] = chunk_list(params['product_list'], CMR_PAGE_SIZE) - list_param_names = ['platform', 'season', 'collections'] # these parameters will dodge the subquery system + list_param_names = ['platform', 'season', 'collections', 'circle'] # these parameters will dodge the subquery system skip_param_names = ['maxResults']# these params exist in opts, but shouldn't be passed on to subqueries at ALL params = dict([ (k, v) for k, v in params.items() if k not in skip_param_names ]) @@ -37,6 +37,7 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]: sub_queries = cartesian_product(subquery_params) final_sub_query_opts = [] + for query in sub_queries: q = dict() for p in query: diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py index 867a68e7..7db41576 100644 --- a/asf_search/CMR/translate.py +++ b/asf_search/CMR/translate.py @@ -43,6 +43,10 @@ def translate_opts(opts: ASFSearchOptions) -> list: (shapeType, shape) = wkt_to_cmr_shape(shape).split(':') dict_opts[shapeType] = shape + if "circle" in dict_opts: + # Map: to convert floats to strings before joining: + dict_opts['circle'] = ','.join(map(str, dict_opts['circle'])) + # If you need to use the temporal key: if any(key in dict_opts for key in ['start', 'end', 'season']): dict_opts = fix_date(dict_opts) @@ -83,6 +87,7 @@ def translate_opts(opts: ASFSearchOptions) -> list: cmr_opts.extend(additional_keys) + print(f"cmr_opts: {cmr_opts}") return cmr_opts diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index 156e50d5..b43de86e 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -89,13 +89,13 @@ def search_generator( while(cmr_search_after_header is not None): try: items, subquery_max_results, cmr_search_after_header = query_cmr(opts.session, url, translated_opts, subquery_count) - except (ASFSearchError, CMRIncompleteError) as e: - message = str(e) + except (ASFSearchError, CMRIncompleteError) as exc: + message = str(exc) logging.error(message) report_search_error(query, message) opts.session.headers.pop('CMR-Search-After', None) - return - + raise + opts.session.headers.update({'CMR-Search-After': cmr_search_after_header}) last_page = process_page(items, maxResults, subquery_max_results, total, subquery_count, opts) subquery_count += len(last_page) diff --git 
a/tests/yml_tests/test_ASFSearchOptions.yml b/tests/yml_tests/test_ASFSearchOptions.yml
index 9408283c..e4a0f061 100644
--- a/tests/yml_tests/test_ASFSearchOptions.yml
+++ b/tests/yml_tests/test_ASFSearchOptions.yml
@@ -161,3 +161,9 @@ tests:
     expect_output:
       host: does-not-exist.asf.alaska.edu
       provider: TOTALLY NOT ASF
+
+- test-ASFSearchOptions - Circle works:
+    exception: Null
+    circle: [0, 0, 100]
+    expect_output:
+      circle: [0, 0, 100]
diff --git a/tests/yml_tests/test_search.yml b/tests/yml_tests/test_search.yml
index 97449d63..87239864 100644
--- a/tests/yml_tests/test_search.yml
+++ b/tests/yml_tests/test_search.yml
@@ -42,3 +42,10 @@ tests:
     platform: "Sentinel-1"
     status_code: 500
     report: "Server Error: This is a Test Error"
+
+- test-ASFSearch-search-error 400-Error circle radius too small:
+    parameters:
+      circle: [0, 0, 2]
+    status_code: 400
+    report: "Circle radius must be between 10 and 6000000, but was 2.0"
+
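End to end, PATCH 04 validates `circle` in ASFSearchOptions, maps it onto CMR's native circle parameter, and covers both the happy path and CMR's radius bounds in the new tests. A usage sketch (coordinates are illustrative; CMR expects [longitude, latitude, radius in meters]):

    import asf_search as asf

    # circle is [longitude, latitude, radius in meters]
    opts = asf.ASFSearchOptions(circle=[-147.72, 64.85, 10000])
    results = asf.search(opts=opts)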
From f0f9172653010eb72b1e98f22f4a3d5d49040dae Mon Sep 17 00:00:00 2001
From: Cameron Showalter
Date: Thu, 10 Aug 2023 12:52:59 -0800
Subject: [PATCH 05/40] Removed debug print I forgot about
---
 asf_search/CMR/translate.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py
index 7db41576..9b691671 100644
--- a/asf_search/CMR/translate.py
+++ b/asf_search/CMR/translate.py
@@ -87,7 +87,6 @@ def translate_opts(opts: ASFSearchOptions) -> list:

     cmr_opts.extend(additional_keys)

-    print(f"cmr_opts: {cmr_opts}")
     return cmr_opts

From 92e12b34f724f8806c88e393780a1ecb776473c3 Mon Sep 17 00:00:00 2001
From: Cameron Showalter
Date: Wed, 16 Aug 2023 20:54:04 -0800
Subject: [PATCH 06/40] Exposed get_urls method, to use in SearchAPI. Fixed bug where fileType would throw KeyError if used on a non-burst
---
 asf_search/ASFProduct.py | 50 +++++++++++++++++++++++++---------------
 1 file changed, 32 insertions(+), 18 deletions(-)

diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py
index 67694e82..dfdf7475 100644
--- a/asf_search/ASFProduct.py
+++ b/asf_search/ASFProduct.py
@@ -24,6 +24,11 @@ def __init__(self, args: dict = {}, session: ASFSession = ASFSession()):
         self.baseline = translated['baseline']
         self.session = session

+        if 'additionalUrls' not in self.properties or len(self.properties['additionalUrls']) == 0:
+            self.multiple_files = False
+        else:
+            self.multiple_files = True
+
     def __str__(self):
         return json.dumps(self.geojson(), indent=2, sort_keys=True)

@@ -48,41 +53,50 @@ def download(self, path: str, filename: str = None, session: ASFSession = None,
         default_filename = self.properties['fileName']

         if filename is not None:
-            multiple_files = (
-                (fileType == FileDownloadType.ADDITIONAL_FILES and len(self.properties['additionalUrls']) > 1)
-                or fileType == FileDownloadType.ALL_FILES
-            )
-            if multiple_files:
-                warnings.warn(f"Attempting to download multiple files for product, ignoring user provided filename argument \"{filename}\", using default.")
+            # Check if we should support the filename argument:
+            if self.multiple_files and fileType in [FileDownloadType.ADDITIONAL_FILES, FileDownloadType.ALL_FILES]:
+                warnings.warn(f"Attempting to download multiple files for product, ignoring user provided filename argument '{filename}', using default.")
             else:
                 default_filename = filename
-
+
         if session is None:
             session = self.session

+        urls = self.get_urls(fileType=fileType)
+
+        for url in urls:
+            base_filename = '.'.join(default_filename.split('.')[:-1])
+            extension = url.split('.')[-1]
+            download_url(
+                url=url,
+                path=path,
+                filename=f"{base_filename}.{extension}",
+                session=session
+            )
+
+    def get_urls(self, fileType = FileDownloadType.DEFAULT_FILE) -> list:
         urls = []

         def get_additional_urls():
+            if not self.multiple_files:
+                ASF_LOGGER.warning(f"You attempted to download multiple files from {self.properties['sceneName']}, this product only has one file to download.")
+                return []
+
+            additional_urls = []
             for url in self.properties['additionalUrls']:
+                additional_urls.append(url)
+            return additional_urls

         if fileType == FileDownloadType.DEFAULT_FILE:
-            urls.append((default_filename, self.properties['url']))
+            urls.append(self.properties['url'])
         elif fileType == FileDownloadType.ADDITIONAL_FILES:
             urls.extend(get_additional_urls())
         elif fileType == FileDownloadType.ALL_FILES:
-            urls.append((default_filename, self.properties['url']))
+            urls.append(self.properties['url'])
             urls.extend(get_additional_urls())
         else:
             raise ValueError("Invalid FileDownloadType provided, the valid types are 'DEFAULT_FILE', 'ADDITIONAL_FILES', and 'ALL_FILES'")
-
-        for filename, url in urls:
-            download_url(url=url, path=path, filename=filename, session=session)
+        return urls

     def stack(
             self,
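The refactor above makes `download` a thin wrapper over the newly exposed `get_urls`, so callers can enumerate a product's file URLs without downloading anything. A sketch of the intended use (the granule name is a placeholder, not a real scene):

    import asf_search as asf

    product = asf.granule_search(['S1A_EXAMPLE_GRANULE'])[0]  # placeholder granule name
    for url in product.get_urls(fileType=asf.FileDownloadType.ALL_FILES):
        print(url)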
From 5972616694087f3b9d4d09e4984e2800bb7c1eb3 Mon Sep 17 00:00:00 2001
From: Cameron Showalter
Date: Wed, 16 Aug 2023 20:56:29 -0800
Subject: [PATCH 07/40] Made it clear that baseline doesn't support classic search options. Throws warning and wipes them if provided
---
 asf_search/search/baseline_search.py | 23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py
index 026137db..7fccb849 100644
--- a/asf_search/search/baseline_search.py
+++ b/asf_search/search/baseline_search.py
@@ -1,8 +1,10 @@
-from asf_search.baseline.stack import get_baseline_from_stack, get_default_product_type
+
 from copy import copy

+from asf_search import ASF_LOGGER
+from asf_search.baseline.stack import get_baseline_from_stack, get_default_product_type
 from asf_search.search import search, product_search
-from asf_search.ASFSearchOptions import ASFSearchOptions
+from asf_search.ASFSearchOptions import ASFSearchOptions, config
 from asf_search.ASFSearchResults import ASFSearchResults
 from asf_search.ASFProduct import ASFProduct
 from asf_search.constants import PLATFORM
@@ -30,9 +32,6 @@ def stack_from_product(

     :return: ASFSearchResults(dict) of search results
     """
-
-    opts = (ASFSearchOptions() if opts is None else copy(opts))
-
     stack_opts = get_stack_opts(reference, opts=opts)

     stack = search(opts=stack_opts)
@@ -58,10 +57,8 @@ def stack_from_id(

     :return: ASFSearchResults(list) of search results
     """
-
     opts = (ASFSearchOptions() if opts is None else copy(opts))
-
     reference_results = product_search(product_list=reference_id, opts=opts)

     reference_results.raise_if_incomplete()
@@ -78,7 +75,15 @@ def get_stack_opts(
         opts: ASFSearchOptions = None
 ) -> ASFSearchOptions:

-    stack_opts = (ASFSearchOptions() if opts is None else copy(opts))
+    if opts is None:
+        stack_opts = ASFSearchOptions()
+    else:
+        stack_opts = copy(opts)
+        # If they set any search-specific keys inside the opts (exclude 'provider' and such):
+        if stack_opts:
+            ASF_LOGGER.warning(f'Baseline search options provided, but only the service config options will be used. [{config.config.keys()}]')
+        stack_opts.reset_search()
+
     stack_opts.processingLevel = get_default_product_type(reference)

     if reference.properties['platform'] in precalc_platforms:
@@ -88,7 +93,7 @@ def get_stack_opts(
             raise ASFBaselineError(f'Requested reference product needs a baseline stack ID but does not have one: {reference.properties["fileID"]}')

     # build a stack from scratch if it's a non-precalc dataset with state vectors
-
+
     if reference.properties['processingLevel'] == 'BURST':
         stack_opts.fullBurstID = reference.properties['burst']['fullBurstID']
         stack_opts.polarization = [reference.properties['polarization']]

From d19a24f543ab355998354577d8b2df5cb8536f55 Mon Sep 17 00:00:00 2001
From: Cameron Showalter
Date: Fri, 18 Aug 2023 12:19:04 -0800
Subject: [PATCH 08/40] Updated with where this branch is currently at
---
 CHANGELOG.md | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 540279f5..e975886a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -25,6 +25,17 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-
-->

+------
+## [TODO](https://github.com/asfadmin/Discovery-asf_search/compare/v6.6.2...TODO)
+### Added
+- Added `asf.ASFSearchOptions(circle=[long, lat, radius])` search param. Takes a list of exactly 3 numbers.
+- Exposed `asf.validator_map`, which, given an opts search param, can be used to look up which method it will be validated against.
+- Exposed `ASFProduct.get_urls`, which returns the URLs for its products directly. Which files are returned can be controlled with the `fileType` enum.
+### Fixed
+- Fixed bug in `ASFProduct` where asking for `asf.ADDITIONAL_FILES` on non-burst products would throw a KeyError.
+### Changed
+- `stack_from_id()` now raises if results are incomplete, before checking if reference was found
+
 ------
 ## [v6.6.2](https://github.com/asfadmin/Discovery-asf_search/compare/v6.6.1...v6.6.2)
 ### Added

From 9dc3a3396323cb4277c439b6a51c893f3f35b808 Mon Sep 17 00:00:00 2001
From: Cameron Showalter
Date: Thu, 31 Aug 2023 11:36:07 -0800
Subject: [PATCH 09/40] Added circle key to search, usable without opts. Also made it so generator won't throw on incomplete results, but .search directly will
---
 asf_search/search/search.py | 5 +++++
 asf_search/search/search_generator.py | 9 ++++++++-
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/asf_search/search/search.py b/asf_search/search/search.py
index bf7b0529..c2967d7e 100644
--- a/asf_search/search/search.py
+++ b/asf_search/search/search.py
@@ -12,6 +12,7 @@ def search(
         beamMode: Union[str, Iterable[str]] = None,
         beamSwath: Union[str, Iterable[str]] = None,
         campaign: Union[str, Iterable[str]] = None,
+        circle: Tuple[float, float, float] = None,
         maxDoppler: float = None,
         minDoppler: float = None,
         end: Union[datetime.datetime, str] = None,
@@ -51,6 +52,7 @@ def search(
     :param beamMode: The beam mode used to acquire the data.
     :param beamSwath: Encompasses a look angle and beam mode.
     :param campaign: For UAVSAR and AIRSAR data collections only. Search by general location, site description, or data grouping as supplied by flight agency or project.
+    :param circle: Search by circle defined by list of three floats: [longitude, latitude, radius in meters]
     :param maxDoppler: Doppler provides an indication of how much the look direction deviates from the ideal perpendicular flight direction acquisition.
     :param minDoppler: Doppler provides an indication of how much the look direction deviates from the ideal perpendicular flight direction acquisition. 
:param end: End date of data acquisition. Supports timestamps as well as natural language such as "3 weeks ago" @@ -95,6 +97,9 @@ def search( results.searchComplete = page.searchComplete results.searchOptions = page.searchOptions + # Raise if they didn't get everything. If you're okay with partial + # results, use asf.search_generator directly + results.raise_if_incomplete() results.sort(key=lambda p: (p.properties['stopTime'], p.properties['fileID']), reverse=True) return results diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index b43de86e..d0dd2a22 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -27,6 +27,7 @@ def search_generator( beamMode: Union[str, Iterable[str]] = None, beamSwath: Union[str, Iterable[str]] = None, campaign: Union[str, Iterable[str]] = None, + circle: Tuple[float, float, float] = None, maxDoppler: float = None, minDoppler: float = None, end: Union[datetime.datetime, str] = None, @@ -94,7 +95,13 @@ def search_generator( logging.error(message) report_search_error(query, message) opts.session.headers.pop('CMR-Search-After', None) - raise + # If it's a CMRIncompleteError, we can just stop here and return what we have + # It's up to the user to call .raise_if_incomplete() if they're using the + # generator directly. + if type(exc) == CMRIncompleteError: + return + else: + raise opts.session.headers.update({'CMR-Search-After': cmr_search_after_header}) last_page = process_page(items, maxResults, subquery_max_results, total, subquery_count, opts) From 45460e3edab3b995db3c75ad7720ce521b9d31af Mon Sep 17 00:00:00 2001 From: kim Date: Thu, 31 Aug 2023 16:18:11 -0800 Subject: [PATCH 10/40] updates download file test case. Adds _has_multiple_files() method to ASFProduct --- asf_search/ASFProduct.py | 12 +++++------- tests/ASFProduct/test_ASFProduct.py | 2 +- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py index dfdf7475..04de72df 100644 --- a/asf_search/ASFProduct.py +++ b/asf_search/ASFProduct.py @@ -24,11 +24,6 @@ def __init__(self, args: dict = {}, session: ASFSession = ASFSession()): self.baseline = translated['baseline'] self.session = session - if 'additionalUrls' not in self.properties or len(self.properties['additionalUrls']) == 0: - self.multiple_files = False - else: - self.multiple_files = True - def __str__(self): return json.dumps(self.geojson(), indent=2, sort_keys=True) @@ -54,7 +49,7 @@ def download(self, path: str, filename: str = None, session: ASFSession = None, if filename is not None: # Check if we should support the filename argument: - if self.multiple_files and fileType in [FileDownloadType.ADDITIONAL_FILES, FileDownloadType.ALL_FILES]: + if self._has_multiple_files() and fileType in [FileDownloadType.ADDITIONAL_FILES, FileDownloadType.ALL_FILES]: warnings.warn(f"Attempting to download multiple files for product, ignoring user provided filename argument '{filename}', using default.") else: default_filename = filename @@ -78,7 +73,7 @@ def get_urls(self, fileType = FileDownloadType.DEFAULT_FILE) -> list: urls = [] def get_additional_urls(): - if not self.multiple_files: + if not self._has_multiple_files(): ASF_LOGGER.warning(f"You attempted to download multiple files from {self.properties['sceneName']}, this product only has one file to download.") return [] @@ -148,3 +143,6 @@ def remotezip(self, session: ASFSession) -> RemoteZip: from .download.download import remotezip return 
remotezip(self.properties['url'], session=session)
+
+    def _has_multiple_files(self):
+        return 'additionalUrls' in self.properties and len(self.properties['additionalUrls']) > 0
diff --git a/tests/ASFProduct/test_ASFProduct.py b/tests/ASFProduct/test_ASFProduct.py
index 22102d9c..e5cfd9e1 100644
--- a/tests/ASFProduct/test_ASFProduct.py
+++ b/tests/ASFProduct/test_ASFProduct.py
@@ -78,7 +78,7 @@ def run_test_ASFProduct_download(reference, filename, filetype, additional_urls)
     with patch('builtins.open', unittest.mock.mock_open()) as m:
         if filename != None and (
             (filetype == FileDownloadType.ADDITIONAL_FILES and len(additional_urls) > 1)
-            or filetype == FileDownloadType.ALL_FILES
+            or (filetype == FileDownloadType.ALL_FILES and len(additional_urls) > 0)
         ):
             with pytest.warns(Warning):
                 product.download('./', filename=filename, fileType=filetype)

From e4f45ed9d45a7be5c5d41b495df4581178b563cd Mon Sep 17 00:00:00 2001
From: kim
Date: Wed, 7 Feb 2024 16:32:48 -0900
Subject: [PATCH 11/40] removes missing import in baseline_search.py
---
 asf_search/search/baseline_search.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py
index 07c6a0c2..4d7da17f 100644
--- a/asf_search/search/baseline_search.py
+++ b/asf_search/search/baseline_search.py
@@ -3,10 +3,8 @@
 from asf_search import ASF_LOGGER
 from copy import copy

-from asf_search import ASF_LOGGER
-from asf_search.baseline.stack import get_baseline_from_stack, get_default_product_type
 from asf_search.search import search, product_search
-from asf_search.ASFSearchOptions import ASFSearchOptions, config
+from asf_search.ASFSearchOptions import ASFSearchOptions
 from asf_search.ASFSearchResults import ASFSearchResults
 from asf_search import ASFProduct
 from asf_search.constants import PLATFORM

From ec570e7cf410f7c1c8abf937f803266c03c37654 Mon Sep 17 00:00:00 2001
From: kim
Date: Mon, 12 Feb 2024 15:03:34 -0900
Subject: [PATCH 12/40] adds some checks for unavailable fields in jsonlite
---
 asf_search/export/jsonlite.py | 13 +++++++++++--
 asf_search/export/jsonlite2.py | 6 +++++-
 asf_search/search/search.py | 1 +
 asf_search/search/search_generator.py | 8 +++++++-
 tests/Search/test_search.py | 2 +-
 5 files changed, 25 insertions(+), 5 deletions(-)

diff --git a/asf_search/export/jsonlite.py b/asf_search/export/jsonlite.py
index 8f581cfd..56848b35 100644
--- a/asf_search/export/jsonlite.py
+++ b/asf_search/export/jsonlite.py
@@ -129,7 +129,7 @@ def getItem(self, p):
             pass

         try:
-            p['frameNumber'] = int(p['frameNumber'])
+            p['frameNumber'] = int(p.get('frameNumber'))
         except TypeError:
             pass

@@ -176,13 +176,22 @@ def getItem(self, p):
             if result[key] in [ 'NA', 'NULL']:
                 result[key] = None

-        if 'temporalBaseline' in p.keys() or 'perpendicularBaseline' in p.keys():
+        if 'temporalBaseline' in p.keys():
             result['temporalBaseline'] = p['temporalBaseline']
+        if 'perpendicularBaseline' in p.keys():
             result['perpendicularBaseline'] = p['perpendicularBaseline']

         if p.get('processingLevel') == 'BURST': # is a burst product
             result['burst'] = p['burst']

+        if p.get('operaBurstID') is not None or result['productID'].startswith('OPERA'):
+            result['opera'] = {
+                'operaBurstID': p.get('operaBurstID'),
+                'additionalUrls': p.get('additionalUrls'),
+            }
+            if p.get('validityStartDate'):
+                result['opera']['validityStartDate'] = p.get('validityStartDate')
+
         return result

     def getOutputType(self) -> str:
diff --git a/asf_search/export/jsonlite2.py b/asf_search/export/jsonlite2.py index 
5cd936b2..125363df 100644 --- a/asf_search/export/jsonlite2.py +++ b/asf_search/export/jsonlite2.py @@ -54,12 +54,16 @@ def getItem(self, p): 'pge': p['pgeVersion'] } - if 'temporalBaseline' in p.keys() or 'perpendicularBaseline' in p.keys(): + if 'temporalBaseline' in p.keys(): result['tb'] = p['temporalBaseline'] + if 'perpendicularBaseline' in p.keys(): result['pb'] = p['perpendicularBaseline'] if p.get('burst') is not None: # is a burst product result['s1b'] = p['burst'] + + if p.get('opera') is not None: + result['s1o'] = p['opera'] return result diff --git a/asf_search/search/search.py b/asf_search/search/search.py index e91eac0c..4627ebbb 100644 --- a/asf_search/search/search.py +++ b/asf_search/search/search.py @@ -99,6 +99,7 @@ def search( results.searchComplete = page.searchComplete results.searchOptions = page.searchOptions + results.raise_if_incomplete() results.sort(key=lambda p: p.get_sort_keys(), reverse=True) return results diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index 59d8e5f1..14052ee2 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -103,7 +103,13 @@ def search_generator( logging.error(message) report_search_error(query, message) opts.session.headers.pop('CMR-Search-After', None) - return + # If it's a CMRIncompleteError, we can just stop here and return what we have + # It's up to the user to call .raise_if_incomplete() if they're using the + # generator directly. + if type(exc) == CMRIncompleteError: + return + else: + raise opts.session.headers.update({'CMR-Search-After': cmr_search_after_header}) last_page = process_page(items, maxResults, subquery_max_results, total, subquery_count, opts) diff --git a/tests/Search/test_search.py b/tests/Search/test_search.py index 7ef9df95..a95b6928 100644 --- a/tests/Search/test_search.py +++ b/tests/Search/test_search.py @@ -85,7 +85,7 @@ def custom_matcher(request: requests.Request): results = search(opts=searchOptions) assert results is not None - assert 0 < len(results) <= INTERNAL.CMR_PAGE_SIZE + assert 0 <= len(results) <= INTERNAL.CMR_PAGE_SIZE with raises(ASFSearchError): results.raise_if_incomplete() From 272b3cafd2002259c5bf376a0451ba47715762b4 Mon Sep 17 00:00:00 2001 From: kim Date: Mon, 26 Feb 2024 17:06:38 -0900 Subject: [PATCH 13/40] updated to searchapi baseline tests --- asf_search/ASFStackableProduct.py | 7 +++++++ asf_search/Products/S1Product.py | 4 ++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/asf_search/ASFStackableProduct.py b/asf_search/ASFStackableProduct.py index 60c3830e..b85f0626 100644 --- a/asf_search/ASFStackableProduct.py +++ b/asf_search/ASFStackableProduct.py @@ -73,3 +73,10 @@ def get_default_baseline_product_type() -> Union[str, None]: Returns the product type to search for when building a baseline stack. 
""" return None + + def has_baseline(self) -> bool: + baseline = self.get_baseline_calc_properties() + + return ( + baseline is not None + ) \ No newline at end of file diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py index 25282de7..329e37de 100644 --- a/asf_search/Products/S1Product.py +++ b/asf_search/Products/S1Product.py @@ -30,10 +30,10 @@ class S1Product(ASFStackableProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - if self._has_baseline(): + if self.has_baseline(): self.baseline = self.get_baseline_calc_properties() - def _has_baseline(self) -> bool: + def has_baseline(self) -> bool: baseline = self.get_baseline_calc_properties() return ( From a3d1221843fcffa105904027beb822a8e351cdee Mon Sep 17 00:00:00 2001 From: kim Date: Wed, 28 Feb 2024 16:23:50 -0900 Subject: [PATCH 14/40] line string work --- asf_search/ASFSearchOptions/validator_map.py | 5 +++-- asf_search/ASFSearchOptions/validators.py | 8 ++++++++ asf_search/CMR/subquery.py | 2 +- asf_search/CMR/translate.py | 3 +++ asf_search/Products/ALOSProduct.py | 1 + asf_search/search/search.py | 1 + asf_search/search/search_generator.py | 1 + 7 files changed, 18 insertions(+), 3 deletions(-) diff --git a/asf_search/ASFSearchOptions/validator_map.py b/asf_search/ASFSearchOptions/validator_map.py index b45baa3a..89b12aec 100644 --- a/asf_search/ASFSearchOptions/validator_map.py +++ b/asf_search/ASFSearchOptions/validator_map.py @@ -3,7 +3,7 @@ from .validators import ( parse_string, parse_float, parse_wkt, parse_date, parse_string_list, parse_int_list, parse_int_or_range_list, - parse_float_or_range_list, parse_circle, + parse_float_or_range_list, parse_circle, parse_linestring, parse_session ) @@ -32,7 +32,8 @@ def validate(key, value): 'beamMode': parse_string_list, 'beamSwath': parse_string_list, 'campaign': parse_string, - 'circle': parse_circle, + 'circle': parse_circle, + 'linestring': parse_linestring, 'maxDoppler': parse_float, 'minDoppler': parse_float, 'maxFaradayRotation': parse_float, diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py index 5208db9c..65d0c857 100644 --- a/asf_search/ASFSearchOptions/validators.py +++ b/asf_search/ASFSearchOptions/validators.py @@ -201,6 +201,14 @@ def parse_circle(value: List[float]) -> str: raise ValueError(f'Invalid circle, must be 3 values (lat, long, radius). Got: {value}') return value +# Parse a CMR linestring: +# [longitude, latitude, longitude, latitude, ...] +def parse_linestring(value: List[float]) -> str: + value = parse_float_list(value) + if len(value) % 2 != 0: + raise ValueError(f'Invalid linestring, must be values of format (lat, long, lat, long, ...). 
Got: {value}') + return value + # Take "requests.Session", or anything that subclasses it: def parse_session(session: Type[requests.Session]): if issubclass(type(session), requests.Session): diff --git a/asf_search/CMR/subquery.py b/asf_search/CMR/subquery.py index f5fb5d08..cab51f94 100644 --- a/asf_search/CMR/subquery.py +++ b/asf_search/CMR/subquery.py @@ -22,7 +22,7 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]: if params.get(chunked_key) is not None: params[chunked_key] = chunk_list(params[chunked_key], CMR_PAGE_SIZE) - list_param_names = ['platform', 'season', 'collections', 'circle', 'dataset'] # these parameters will dodge the subquery system + list_param_names = ['platform', 'season', 'collections', 'circle', 'linestring', 'dataset'] # these parameters will dodge the subquery system skip_param_names = ['maxResults']# these params exist in opts, but shouldn't be passed on to subqueries at ALL collections, aliased_keywords = get_keyword_concept_ids(params, opts.collectionAlias) diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py index 9944171e..af6e3d26 100644 --- a/asf_search/CMR/translate.py +++ b/asf_search/CMR/translate.py @@ -49,6 +49,9 @@ def translate_opts(opts: ASFSearchOptions) -> List: # Map: to convert floats to strings before joining: dict_opts['circle'] = ','.join(map(str, dict_opts['circle'])) + # if "linestring" in dict_opts: + # dict_opts['linestring'] = ','.join(map(str, dict_opts['linestring'])) + # If you need to use the temporal key: if any(key in dict_opts for key in ['start', 'end', 'season']): dict_opts = fix_date(dict_opts) diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py index 9f31011b..d90902f7 100644 --- a/asf_search/Products/ALOSProduct.py +++ b/asf_search/Products/ALOSProduct.py @@ -16,6 +16,7 @@ class ALOSProduct(ASFStackableProduct): 'offNadirAngle': {'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0], 'cast': try_parse_float}, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, + 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): diff --git a/asf_search/search/search.py b/asf_search/search/search.py index 4627ebbb..51559135 100644 --- a/asf_search/search/search.py +++ b/asf_search/search/search.py @@ -13,6 +13,7 @@ def search( beamSwath: Union[str, Sequence[str]] = None, campaign: Union[str, Sequence[str]] = None, circle: Tuple[float, float, float] = None, + linestring: Sequence[float] = None, maxDoppler: float = None, minDoppler: float = None, end: Union[datetime.datetime, str] = None, diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index 14052ee2..b3e4c9d8 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -31,6 +31,7 @@ def search_generator( beamSwath: Union[str, Sequence[str]] = None, campaign: Union[str, Sequence[str]] = None, circle: Tuple[float, float, float] = None, + linestring: Sequence[float] = None, maxDoppler: float = None, minDoppler: float = None, end: Union[datetime.datetime, str] = None, From 3bb63e082f70d2e5a418b20fa01b75736a5a0aee Mon Sep 17 00:00:00 2001 From: kim Date: Thu, 29 Feb 2024 13:32:25 -0900 Subject: [PATCH 15/40] moves linestring repair before intersectsWith conversion --- 
asf_search/CMR/translate.py | 16 +++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py
index af6e3d26..65efefea 100644
--- a/asf_search/CMR/translate.py
+++ b/asf_search/CMR/translate.py
@@ -8,7 +8,7 @@
 from shapely.geometry import Polygon
 from shapely.geometry.base import BaseGeometry
 from .field_map import field_map
-from .datasets import dataset_collections, collections_per_platform
+from .datasets import collections_per_platform

 import logging

@@ -24,6 +24,14 @@ def translate_opts(opts: ASFSearchOptions) -> List:
     if escape_commas in dict_opts:
         dict_opts[escape_commas] = dict_opts[escape_commas].replace(",", "\,")

+
+    if "linestring" in dict_opts:
+        dict_opts['linestring'] = ','.join(map(str, dict_opts['linestring']))
+
+    if "circle" in dict_opts:
+        # Map: to convert floats to strings before joining:
+        dict_opts['circle'] = ','.join(map(str, dict_opts['circle']))
+
     # Special case to unravel WKT field a little for compatibility
     if "intersectsWith" in dict_opts:
         shape = wkt.loads(dict_opts.pop('intersectsWith', None))
@@ -45,12 +53,6 @@
         (shapeType, shape) = wkt_to_cmr_shape(shape).split(':')
         dict_opts[shapeType] = shape

-    if "circle" in dict_opts:
-        # Map: to convert floats to strings before joining:
-        dict_opts['circle'] = ','.join(map(str, dict_opts['circle']))
-
-    # if "linestring" in dict_opts:
-    #     dict_opts['linestring'] = ','.join(map(str, dict_opts['linestring']))

     # If you need to use the temporal key:
     if any(key in dict_opts for key in ['start', 'end', 'season']):

From 495faa9ee51770d97a6df1c71902cb205c4a5764 Mon Sep 17 00:00:00 2001
From: kim
Date: Tue, 5 Mar 2024 11:34:50 -0900
Subject: [PATCH 16/40] bugfix: fixes range params, changes old exception text, adds non-polygon shapes
---
 asf_search/ASFSearchOptions/validator_map.py | 7 ++++++-
 asf_search/ASFSearchOptions/validators.py | 19 ++++++++++---
 asf_search/CMR/subquery.py | 28 +++++++++++++++++---
 asf_search/CMR/translate.py | 27 +++++++++++++------
 asf_search/exceptions.py | 9 ++-----
 asf_search/search/search_generator.py | 3 ++-
 6 files changed, 70 insertions(+), 23 deletions(-)

diff --git a/asf_search/ASFSearchOptions/validator_map.py b/asf_search/ASFSearchOptions/validator_map.py
index 0830fbde..fc7a2a8d 100644
--- a/asf_search/ASFSearchOptions/validator_map.py
+++ b/asf_search/ASFSearchOptions/validator_map.py
@@ -4,7 +4,7 @@
     parse_string, parse_float, parse_wkt, parse_date,
     parse_string_list, parse_int_list, parse_int_or_range_list,
     parse_float_or_range_list, parse_circle, parse_linestring,
-    parse_cmr_keywords_list,
+    parse_cmr_keywords_list, parse_point, parse_coord_string,
     parse_session
 )

@@ -35,10 +35,15 @@ def validate(key, value):
     'campaign': parse_string,
     'circle': parse_circle,
     'linestring': parse_linestring,
+    'point': parse_point,
+    'maxBaselinePerp': parse_float,
+    'minBaselinePerp': parse_float,
     'maxDoppler': parse_float,
     'minDoppler': parse_float,
     'maxFaradayRotation': parse_float,
     'minFaradayRotation': parse_float,
+    'maxInsarStackSize': parse_int_or_range_list,
+    'minInsarStackSize': parse_int_or_range_list,
     'flightDirection': parse_string,
     'flightLine': parse_string,
     'frame': parse_int_or_range_list,
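The new `point` entry, like `circle` and `linestring`, funnels through the coordinate validators changed in the next diff; each takes a flat list of longitude/latitude values matching CMR's native shape parameters. A sketch of what ASFSearchOptions now accepts (values illustrative):

    from asf_search import ASFSearchOptions

    # flat [longitude, latitude, ...] pairs, like CMR's own linestring parameter
    opts = ASFSearchOptions(linestring=[-152.81, 58.49, -154.90, 57.49])
    # point takes a single [longitude, latitude] pair:
    # opts = ASFSearchOptions(point=[-152.81, 58.49])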
diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py
index 05518952..5a84560e 100644
--- a/asf_search/ASFSearchOptions/validators.py
+++ b/asf_search/ASFSearchOptions/validators.py
@@ -52,7 +52,7 @@ def parse_date(value: Union[str, datetime.datetime]) -> str:
     date = dateparser.parse(str(value))
     if date is None:
         raise ValueError(f"Invalid date: '{value}'.")
-    return str(value)
+    return str(date.date())


 def parse_range(value: Tuple[number, number], h: Callable[[number], number]) -> Tuple[number, number]:
@@ -213,7 +213,7 @@ def parse_wkt(value: str) -> str:

 # Parse a CMR circle:
 # [longitude, latitude, radius(meters)]
-def parse_circle(value: List[float]) -> str:
+def parse_circle(value: List[float]) -> List[float]:
     value = parse_float_list(value)
     if len(value) != 3:
         raise ValueError(f'Invalid circle, must be 3 values (long, lat, radius). Got: {value}')
@@ -221,12 +221,25 @@

 # Parse a CMR linestring:
 # [longitude, latitude, longitude, latitude, ...]
-def parse_linestring(value: List[float]) -> str:
+def parse_linestring(value: List[float]) -> List[float]:
     value = parse_float_list(value)
     if len(value) % 2 != 0:
         raise ValueError(f'Invalid linestring, must be values of format (long, lat, long, lat, ...). Got: {value}')
     return value

+def parse_point(value: List[float]) -> List[float]:
+    value = parse_float_list(value)
+    if len(value) != 2:
+        raise ValueError(f'Invalid point, must be values of format (long, lat). Got: {value}')
+    return value
+
+# Parse and validate a coordinate string
+def parse_coord_string(value: List):
+    value = parse_float_list(value)
+    if len(value) % 2 != 0:
+        raise ValueError(f'Invalid coordinate string, must be values of format (long, lat, long, lat, ...). Got: {value}')
+    return value
+
 # Take "requests.Session", or anything that subclasses it:
 def parse_session(session: Type[requests.Session]):
     if issubclass(type(session), requests.Session):
diff --git a/asf_search/CMR/subquery.py b/asf_search/CMR/subquery.py
index cab51f94..8b2c52c4 100644
--- a/asf_search/CMR/subquery.py
+++ b/asf_search/CMR/subquery.py
@@ -1,10 +1,10 @@
-from typing import List, Optional, Tuple
+from typing import List, Tuple
 import itertools
 from copy import copy

 from asf_search.ASFSearchOptions import ASFSearchOptions
 from asf_search.constants import CMR_PAGE_SIZE
-
+from asf_search.CMR.field_map import field_map
 from asf_search.CMR.datasets import collections_by_processing_level, collections_per_platform, dataset_collections, get_concept_id_alias, get_dataset_concept_ids

 from numpy import intersect1d, union1d
@@ -22,7 +22,7 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]:
         if params.get(chunked_key) is not None:
             params[chunked_key] = chunk_list(params[chunked_key], CMR_PAGE_SIZE)

-    list_param_names = ['platform', 'season', 'collections', 'circle', 'linestring', 'dataset'] # these parameters will dodge the subquery system
+    list_param_names = ['platform', 'season', 'collections', 'circle', 'linestring', 'point', 'dataset'] # these parameters will dodge the subquery system
     skip_param_names = ['maxResults']# these params exist in opts, but shouldn't be passed on to subqueries at ALL

     collections, aliased_keywords = get_keyword_concept_ids(params, opts.collectionAlias)
@@ -137,6 +137,28 @@ def format_query_params(params) -> List[List[dict]]:


 def translate_param(param_name, param_val) -> List[dict]:
+    # param_list = []
+
+    # cmr_input_map = field_map
+
+    # param_input_map = cmr_input_map[param_name]
+    # cmr_param = param_input_map['key']
+    # cmr_format_str = param_input_map['fmt']
+
+    # if not isinstance(param_val, list):
+    #     param_val = [param_val]
+
+    # for l in 
param_val: + # format_val = l + + # if isinstance(l, list): + # format_val = ','.join([f'{t}' for t in l]) + + # param_list.append({ + # cmr_param: cmr_format_str.format(format_val) + # }) + + # return param_list param_list = [] if not isinstance(param_val, list): diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py index 20a6e198..8d736379 100644 --- a/asf_search/CMR/translate.py +++ b/asf_search/CMR/translate.py @@ -24,13 +24,7 @@ def translate_opts(opts: ASFSearchOptions) -> List: if escape_commas in dict_opts: dict_opts[escape_commas] = dict_opts[escape_commas].replace(",", "\,") - - if "linestring" in dict_opts: - dict_opts['linestring'] = ','.join(map(str, dict_opts['linestring'])) - - if "circle" in dict_opts: - # Map: to convert floats to strings before joining: - dict_opts['circle'] = ','.join(map(str, dict_opts['circle'])) + dict_opts = fix_cmr_shapes(dict_opts) # Special case to unravel WKT field a little for compatibility if "intersectsWith" in dict_opts: @@ -58,6 +52,8 @@ def translate_opts(opts: ASFSearchOptions) -> List: if any(key in dict_opts for key in ['start', 'end', 'season']): dict_opts = fix_date(dict_opts) + dict_opts = fix_range_params(dict_opts) + # convert the above parameters to a list of key/value tuples cmr_opts = [] @@ -103,6 +99,14 @@ def translate_opts(opts: ASFSearchOptions) -> List: return cmr_opts +def fix_cmr_shapes(fixed_params: Dict[str, Any]) -> Dict[str, Any]: + """Fixes raw CMR lon lat coord shapes""" + for param in ['point', 'linestring', 'circle']: + if param in fixed_params: + fixed_params[param] = ','.join(map(str, fixed_params[param])) + + return fixed_params + def should_use_asf_frame(cmr_opts): asf_frame_platforms = ['SENTINEL-1A', 'SENTINEL-1B', 'ALOS'] @@ -163,7 +167,7 @@ def try_parse_float(value: str) -> Optional[float]: return float(value) -def fix_date(fixed_params: Dict[str, Any]): +def fix_date(fixed_params: Dict[str, Any]) -> Dict[str, Any]: if 'start' in fixed_params or 'end' in fixed_params or 'season' in fixed_params: fixed_params["start"] = fixed_params["start"] if "start" in fixed_params else "1978-01-01T00:00:00Z" fixed_params["end"] = fixed_params["end"] if "end" in fixed_params else datetime.utcnow().isoformat() @@ -178,6 +182,13 @@ def fix_date(fixed_params: Dict[str, Any]): return fixed_params +def fix_range_params(fixed_params: Dict[str, Any]) -> Dict[str, Any]: + """Converts ranges to comma separated strings""" + for param in ['offNadirAngle', 'relativeOrbit', 'absoluteOrbit', 'frame', 'asfFrame']: + if param in fixed_params.keys() and isinstance(fixed_params[param], list): + fixed_params[param] = ','.join([str(val) for val in fixed_params[param]]) + + return fixed_params def should_use_bbox(shape: BaseGeometry): """ diff --git a/asf_search/exceptions.py b/asf_search/exceptions.py index 8468af0e..77f77aea 100644 --- a/asf_search/exceptions.py +++ b/asf_search/exceptions.py @@ -7,16 +7,11 @@ class ASFSearchError(ASFError): class ASFSearch4xxError(ASFSearchError): - """Raise when SearchAPI returns a 4xx error""" + """Raise when CMR returns a 4xx error""" class ASFSearch5xxError(ASFSearchError): - """Raise when SearchAPI returns a 5xx error""" - - -class ASFServerError(ASFSearchError): - """Raise when SearchAPI returns an unknown error""" - + """Raise when CMR returns a 5xx error""" class ASFBaselineError(ASFSearchError): """Raise when baseline related errors occur""" diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index 9e3cf455..5589bec8 100644 --- 
a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -8,7 +8,7 @@ import dateparser import warnings -from asf_search import __version__ +from asf_search import ASF_LOGGER, __version__ from asf_search.ASFSearchResults import ASFSearchResults from asf_search.ASFSearchOptions import ASFSearchOptions @@ -93,6 +93,7 @@ def search_generator( queries = build_subqueries(opts) for query in queries: translated_opts = translate_opts(query) + ASF_LOGGER.warning(f"TRANSLATED PARAMS: {translated_opts}") cmr_search_after_header = "" subquery_count = 0 From 1a7214890a5639a2d61afb2b331387a298a406bb Mon Sep 17 00:00:00 2001 From: kim Date: Tue, 5 Mar 2024 12:17:46 -0900 Subject: [PATCH 17/40] update test case logic, remove searchapi output tests --- tests/ASFProduct/test_ASFProduct.py | 2 +- tests/ASFSession/test_ASFSession.py | 6 +++--- tests/Search/test_search.py | 9 ++------- tests/download/test_download.py | 14 +++++++------- tests/pytest-config.yml | 8 ++++---- tests/pytest-managers.py | 16 ++++++++-------- tests/yml_tests/test_ASFSearchResults.yml | 20 ++++++++++---------- 7 files changed, 35 insertions(+), 40 deletions(-) diff --git a/tests/ASFProduct/test_ASFProduct.py b/tests/ASFProduct/test_ASFProduct.py index 42214a2f..efa62c14 100644 --- a/tests/ASFProduct/test_ASFProduct.py +++ b/tests/ASFProduct/test_ASFProduct.py @@ -70,7 +70,7 @@ def run_test_product_get_stack_options(reference, options): def run_test_ASFProduct_download(reference, filename, filetype, additional_urls): product = as_ASFProduct(reference, ASFSession()) product.properties['additionalUrls'] = additional_urls - with patch('asf_search.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 200 mock_get.return_value = resp diff --git a/tests/ASFSession/test_ASFSession.py b/tests/ASFSession/test_ASFSession.py index 21382372..ff28abb3 100644 --- a/tests/ASFSession/test_ASFSession.py +++ b/tests/ASFSession/test_ASFSession.py @@ -16,7 +16,7 @@ def run_auth_with_creds(username: str, password: str): def run_auth_with_token(token: str): session = ASFSession() - with patch('asf_search.ASFSession.get') as mock_token_session: + with patch('asf_search.ASFSession.ASFSession.get') as mock_token_session: if not token.startswith('Bearer EDL'): mock_token_session.return_value.status_code = 400 session.auth_with_token(token) @@ -43,7 +43,7 @@ def run_test_asf_session_rebuild_auth( session = ASFSession() - with patch('asf_search.ASFSession.get') as mock_token_session: + with patch('asf_search.ASFSession.ASFSession.get') as mock_token_session: mock_token_session.return_value.status_code = 200 session.auth_with_token("bad_token") @@ -57,7 +57,7 @@ def run_test_asf_session_rebuild_auth( response.request.url = response_domain response.headers.update({'Authorization': 'Bearer fakeToken'}) - with patch('asf_search.ASFSession._get_domain') as hostname_patch: + with patch('asf_search.ASFSession.ASFSession._get_domain') as hostname_patch: hostname_patch.side_effect = [original_domain, response_domain] session.rebuild_auth(req, response) diff --git a/tests/Search/test_search.py b/tests/Search/test_search.py index a95b6928..c2ce45ee 100644 --- a/tests/Search/test_search.py +++ b/tests/Search/test_search.py @@ -60,10 +60,8 @@ def run_test_search_http_error(search_parameters, status_code: Number, report: s m.register_uri('POST', f"https://{INTERNAL.CMR_HOST}{INTERNAL.CMR_GRANULE_PATH}", status_code=status_code, json={'errors': {'report': 
report}}) m.register_uri('POST', f"https://search-error-report.asf.alaska.edu/", real_http=True) searchOptions = ASFSearchOptions(**search_parameters) - results = search(opts=searchOptions) - assert len(results) == 0 with raises(ASFSearchError): - results.raise_if_incomplete() + results = search(opts=searchOptions) return # If we're not doing an empty search we want to fire off one real query to CMR, then interrupt it with an error @@ -82,12 +80,9 @@ def custom_matcher(request: requests.Request): search_parameters['maxResults'] = INTERNAL.CMR_PAGE_SIZE + 1 searchOptions = ASFSearchOptions(**search_parameters) - results = search(opts=searchOptions) - assert results is not None - assert 0 <= len(results) <= INTERNAL.CMR_PAGE_SIZE with raises(ASFSearchError): - results.raise_if_incomplete() + results = search(opts=searchOptions) def run_test_dataset_search(datasets: List): if any(dataset for dataset in datasets if dataset_collections.get(dataset) is None): diff --git a/tests/download/test_download.py b/tests/download/test_download.py index 794224b1..0a3979ee 100644 --- a/tests/download/test_download.py +++ b/tests/download/test_download.py @@ -8,7 +8,7 @@ from asf_search.download.download import download_url def run_test_download_url_auth_error(url, path, filename): - with patch('asf_search.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 401 mock_get.return_value = resp @@ -17,17 +17,17 @@ def run_test_download_url_auth_error(url, path, filename): with pytest.raises(ASFDownloadError): download_url(url, path, filename) - with patch('asf_search.download.os.path.isdir') as path_mock: + with patch('os.path.isdir') as path_mock: path_mock.return_value = True if url == "urlError": - with patch('asf_search.download.os.path.isfile') as isfile_mock: + with patch('os.path.isfile') as isfile_mock: isfile_mock.return_value = False with pytest.raises(ASFAuthenticationError): download_url(url, path, filename) - with patch('asf_search.download.os.path.isfile') as isfile_mock: + with patch('os.path.isfile') as isfile_mock: isfile_mock.return_value = True with pytest.warns(Warning): @@ -35,13 +35,13 @@ def run_test_download_url_auth_error(url, path, filename): def run_test_download_url(url, path, filename): if filename == 'BURST': - with patch('asf_search.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 202 resp.headers.update({'content-type': 'application/json'}) mock_get.return_value = resp - with patch('asf_search.ASFSession.get') as mock_get_burst: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get_burst: resp_2 = requests.Response() resp_2.status_code = 200 resp_2.headers.update({'content-type': 'image/tiff'}) @@ -51,7 +51,7 @@ def run_test_download_url(url, path, filename): with patch('builtins.open', unittest.mock.mock_open()) as m: download_url(url, path, filename) else: - with patch('asf_search.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 200 mock_get.return_value = resp diff --git a/tests/pytest-config.yml b/tests/pytest-config.yml index 0407bd6b..9087d87b 100644 --- a/tests/pytest-config.yml +++ b/tests/pytest-config.yml @@ -188,10 +188,10 @@ test_types: required_in_title: serialization method: test_serialization -- For running ASFSearchOptions tests: - required_in_title: ASFSearchResults-format - 
required_keys: results - method: test_output_format +# - For running ASFSearchOptions tests: +# required_in_title: ASFSearchResults-format +# required_keys: results +# method: test_output_format - For running search-api keyword-collection aliasing tests: required_in_title: test-aliasing-search-against-api diff --git a/tests/pytest-managers.py b/tests/pytest-managers.py index 1cb9df47..b6619f3c 100644 --- a/tests/pytest-managers.py +++ b/tests/pytest-managers.py @@ -82,7 +82,7 @@ def test_ASFSession_Error(**args) -> None: test_info = args["test_info"] username = test_info["username"] password = test_info["password"] - with patch('asf_search.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get: mock_get.return_value = "Error" with raises(ASFAuthenticationError): @@ -457,15 +457,15 @@ def safe_load_tuple(param): return param -def test_output_format(**args) -> None: - test_info = args['test_info'] +# def test_output_format(**args) -> None: +# test_info = args['test_info'] - products = get_resource(test_info['results']) - if not isinstance(products, List): - products = [products] - results = ASFSearchResults([as_ASFProduct({'meta': product['meta'], 'umm': product['umm']}, ASFSession()) for product in products]) +# products = get_resource(test_info['results']) +# if not isinstance(products, List): +# products = [products] +# results = ASFSearchResults([as_ASFProduct({'meta': product['meta'], 'umm': product['umm']}, ASFSession()) for product in products]) - run_test_output_format(results) +# run_test_output_format(results) def test_keyword_aliasing_results(**args) -> None: test_info = args['test_info'] diff --git a/tests/yml_tests/test_ASFSearchResults.yml b/tests/yml_tests/test_ASFSearchResults.yml index 2fd49568..046d2df5 100644 --- a/tests/yml_tests/test_ASFSearchResults.yml +++ b/tests/yml_tests/test_ASFSearchResults.yml @@ -29,17 +29,17 @@ tests: - Test ASFSearchResults_intersection antimeridian: wkt: POLYGON((-181 -89, -179 -89, -179 89, -181 89, -181 -89)) -- Test ASFSearchResults-format Fairbanks slc: - results: [Fairbanks_SLC.yml] +# - Test ASFSearchResults-format Fairbanks slc: +# results: [Fairbanks_SLC.yml] -- Test ASFSearchResults-format Fairbanks S1 Stack: - results: Fairbanks_S1_stack.yml +# - Test ASFSearchResults-format Fairbanks S1 Stack: +# results: Fairbanks_S1_stack.yml -- Test ASFSearchResults-format Alos: - results: Alos_response.yml +# - Test ASFSearchResults-format Alos: +# results: Alos_response.yml -- Test ASFSearchResults-format L1: - results: Fairbanks_L1.yml +# - Test ASFSearchResults-format L1: +# results: Fairbanks_L1.yml -- Test ASFSearchResults-format ERS Stack: - results: Fairbanks_ers_stack.yml +# - Test ASFSearchResults-format ERS Stack: +# results: Fairbanks_ers_stack.yml From 47b46f7750c44a57861ad1ed37eed411bc8e8a74 Mon Sep 17 00:00:00 2001 From: kim Date: Tue, 5 Mar 2024 14:10:10 -0900 Subject: [PATCH 18/40] remove "ASFSession.ASFSession" in patching test cases --- tests/ASFProduct/test_ASFProduct.py | 2 +- tests/ASFSession/test_ASFSession.py | 6 +++--- tests/download/test_download.py | 8 ++++---- tests/pytest-managers.py | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/ASFProduct/test_ASFProduct.py b/tests/ASFProduct/test_ASFProduct.py index efa62c14..42214a2f 100644 --- a/tests/ASFProduct/test_ASFProduct.py +++ b/tests/ASFProduct/test_ASFProduct.py @@ -70,7 +70,7 @@ def run_test_product_get_stack_options(reference, options): def run_test_ASFProduct_download(reference, filename, filetype, 
additional_urls): product = as_ASFProduct(reference, ASFSession()) product.properties['additionalUrls'] = additional_urls - with patch('asf_search.ASFSession.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 200 mock_get.return_value = resp diff --git a/tests/ASFSession/test_ASFSession.py b/tests/ASFSession/test_ASFSession.py index ff28abb3..21382372 100644 --- a/tests/ASFSession/test_ASFSession.py +++ b/tests/ASFSession/test_ASFSession.py @@ -16,7 +16,7 @@ def run_auth_with_creds(username: str, password: str): def run_auth_with_token(token: str): session = ASFSession() - with patch('asf_search.ASFSession.ASFSession.get') as mock_token_session: + with patch('asf_search.ASFSession.get') as mock_token_session: if not token.startswith('Bearer EDL'): mock_token_session.return_value.status_code = 400 session.auth_with_token(token) @@ -43,7 +43,7 @@ def run_test_asf_session_rebuild_auth( session = ASFSession() - with patch('asf_search.ASFSession.ASFSession.get') as mock_token_session: + with patch('asf_search.ASFSession.get') as mock_token_session: mock_token_session.return_value.status_code = 200 session.auth_with_token("bad_token") @@ -57,7 +57,7 @@ def run_test_asf_session_rebuild_auth( response.request.url = response_domain response.headers.update({'Authorization': 'Bearer fakeToken'}) - with patch('asf_search.ASFSession.ASFSession._get_domain') as hostname_patch: + with patch('asf_search.ASFSession._get_domain') as hostname_patch: hostname_patch.side_effect = [original_domain, response_domain] session.rebuild_auth(req, response) diff --git a/tests/download/test_download.py b/tests/download/test_download.py index 0a3979ee..a89cd872 100644 --- a/tests/download/test_download.py +++ b/tests/download/test_download.py @@ -8,7 +8,7 @@ from asf_search.download.download import download_url def run_test_download_url_auth_error(url, path, filename): - with patch('asf_search.ASFSession.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 401 mock_get.return_value = resp @@ -35,13 +35,13 @@ def run_test_download_url_auth_error(url, path, filename): def run_test_download_url(url, path, filename): if filename == 'BURST': - with patch('asf_search.ASFSession.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 202 resp.headers.update({'content-type': 'application/json'}) mock_get.return_value = resp - with patch('asf_search.ASFSession.ASFSession.get') as mock_get_burst: + with patch('asf_search.ASFSession.get') as mock_get_burst: resp_2 = requests.Response() resp_2.status_code = 200 resp_2.headers.update({'content-type': 'image/tiff'}) @@ -51,7 +51,7 @@ def run_test_download_url(url, path, filename): with patch('builtins.open', unittest.mock.mock_open()) as m: download_url(url, path, filename) else: - with patch('asf_search.ASFSession.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 200 mock_get.return_value = resp diff --git a/tests/pytest-managers.py b/tests/pytest-managers.py index b6619f3c..96d2cb5e 100644 --- a/tests/pytest-managers.py +++ b/tests/pytest-managers.py @@ -82,7 +82,7 @@ def test_ASFSession_Error(**args) -> None: test_info = args["test_info"] username = test_info["username"] password = test_info["password"] - with patch('asf_search.ASFSession.ASFSession.get') as mock_get: + 
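One detail worth recording about this rename: `unittest.mock.patch` resolves its target by importing `asf_search` and then walking attributes, and because the package's `__init__` re-exports the session class (`from .ASFSession import ASFSession`), the package attribute `asf_search.ASFSession` is the class itself, which makes the shorter `asf_search.ASFSession.get` a valid target. A minimal sketch of the pattern these tests rely on, assuming an installed `asf_search`:

```
# Patching the class attribute 'get' means any ASFSession instance created
# inside the code under test sees the mock instead of hitting the network.
from unittest.mock import patch

import requests


def fetch_status() -> int:
    import asf_search
    return asf_search.ASFSession().get('https://example.com').status_code


with patch('asf_search.ASFSession.get') as mock_get:
    resp = requests.Response()
    resp.status_code = 200
    mock_get.return_value = resp
    assert fetch_status() == 200  # no real request is made
```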
with patch('asf_search.ASFSession.get') as mock_get: mock_get.return_value = "Error" with raises(ASFAuthenticationError): From bfc67dfd6a9437e7a09c248d245102587844b224 Mon Sep 17 00:00:00 2001 From: kim Date: Fri, 8 Mar 2024 13:20:19 -0900 Subject: [PATCH 19/40] fixes broken jsonlite outputs when results are empty --- asf_search/ASFSearchOptions/validators.py | 2 +- asf_search/export/jsonlite.py | 5 ++++- asf_search/export/jsonlite2.py | 6 +++++- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py index 9c09d568..fb192682 100644 --- a/asf_search/ASFSearchOptions/validators.py +++ b/asf_search/ASFSearchOptions/validators.py @@ -129,7 +129,7 @@ def parse_cmr_keywords_list(value: Sequence[Dict]): # Parse and validate an iterable of strings: "foo,bar,baz" def parse_string_list(value: Sequence[str]) -> List[str]: - return parse_list(value, str) + return parse_list(value, parse_string) # Parse and validate an iterable of integers: "1,2,3" diff --git a/asf_search/export/jsonlite.py b/asf_search/export/jsonlite.py index 56848b35..99e73de8 100644 --- a/asf_search/export/jsonlite.py +++ b/asf_search/export/jsonlite.py @@ -19,7 +19,10 @@ def results_to_jsonlite(results): ASF_LOGGER.info('started translating results to jsonlite format') - + if len(results) == 0: + yield from json.JSONEncoder(indent=2, sort_keys=True).iterencode({'results': []}) + return + if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): results = [results] diff --git a/asf_search/export/jsonlite2.py b/asf_search/export/jsonlite2.py index 125363df..fac39943 100644 --- a/asf_search/export/jsonlite2.py +++ b/asf_search/export/jsonlite2.py @@ -7,7 +7,11 @@ def results_to_jsonlite2(results): ASF_LOGGER.info('started translating results to jsonlite2 format') - + + if len(results) == 0: + yield from json.JSONEncoder(indent=2, sort_keys=True).iterencode({'results': []}) + return + if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): results = [results] From 9410d8589b1d7e21d679f0cf56660c1bf4c6091a Mon Sep 17 00:00:00 2001 From: kim Date: Tue, 2 Apr 2024 16:22:48 -0800 Subject: [PATCH 20/40] fixes changed method name in S1Product --- asf_search/Products/S1Product.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py index 6092fea3..45ea8fc3 100644 --- a/asf_search/Products/S1Product.py +++ b/asf_search/Products/S1Product.py @@ -33,7 +33,7 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): self.properties['s3Urls'] = self._get_s3_urls() - if self._has_baseline(): + if self.has_baseline(): self.baseline = self.get_baseline_calc_properties() def has_baseline(self) -> bool: From 853807716cf5259414ed6bded5e2d63264d89b11 Mon Sep 17 00:00:00 2001 From: kim Date: Mon, 8 Apr 2024 17:05:28 -0800 Subject: [PATCH 21/40] got asfframe known bugs tests passing, added comment for first test in file --- asf_search/Products/RADARSATProduct.py | 3 +- asf_search/WKT/FilesToWKT.py | 209 +++++++++++++++++++++++++ asf_search/WKT/__init__.py | 1 + asf_search/__init__.py | 2 +- asf_search/export/csv.py | 4 +- setup.py | 7 +- 6 files changed, 221 insertions(+), 5 deletions(-) create mode 100644 asf_search/WKT/FilesToWKT.py diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py index 7db7f1b2..734194d1 100644 --- a/asf_search/Products/RADARSATProduct.py +++ 
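Two small fixes ride along in patch 19: `parse_string_list` now validates each element with `parse_string`, so empty strings in a list raise instead of slipping through, and the jsonlite exports gain an early-out for empty result sets. The shape of that guard, standalone (a sketch; the real functions also normalize generator inputs):

```
# json.JSONEncoder().iterencode streams valid JSON chunk by chunk, so the
# export stays a generator even when there is nothing to translate.
import json


def results_to_jsonlite(results):
    if len(results) == 0:
        yield from json.JSONEncoder(indent=2, sort_keys=True).iterencode({'results': []})
        return
    ...  # normal per-product streaming path


print(''.join(results_to_jsonlite([])))  # prints an indented {"results": []}
```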
b/asf_search/Products/RADARSATProduct.py
@@ -1,6 +1,6 @@
 from typing import Dict, Union
 from asf_search import ASFSearchOptions, ASFSession, ASFProduct, ASFStackableProduct
-from asf_search.CMR.translate import try_parse_float
+from asf_search.CMR.translate import try_parse_float, try_parse_int
 from asf_search.constants import PRODUCT_TYPE
 
 
@@ -13,6 +13,7 @@ class RADARSATProduct(ASFStackableProduct):
         'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
         'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]},
         'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
+        'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int},  # Sentinel and ALOS product alt for frameNumber (ESA_FRAME)
     }
 
     def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
diff --git a/asf_search/WKT/FilesToWKT.py b/asf_search/WKT/FilesToWKT.py
new file mode 100644
index 00000000..e7254411
--- /dev/null
+++ b/asf_search/WKT/FilesToWKT.py
@@ -0,0 +1,209 @@
+import os
+import json
+import zipfile
+import shapefile
+import defusedxml.minidom as md
+from kml2geojson import build_feature_collection as kml2json
+from geomet import wkt
+from io import BytesIO
+import geopandas
+
+# taken from asf's Discovery-WKTUtils
+# Repo: https://github.com/asfadmin/Discovery-WKTUtils
+# File: https://github.com/asfadmin/Discovery-WKTUtils/blob/devel/WKTUtils/FilesToWKT.py
+class filesToWKT:
+    # files = [ open(dir, 'rb'), open(dir2, 'rb'), open(dir3, 'rb') ]
+    def __init__(self, files):
+        self.files = files
+        self.errors = []
+        self.returned_dict = {}
+        # If they pass only one, make that a list of one:
+        if not isinstance(files, type([])):
+            self.files = [self.files]
+        # Have to group all shp types together:
+        file_dict = {}
+        for file in self.files:
+            try:
+                full_name = file.filename
+            except AttributeError:
+                full_name = file.name
+            name = ".".join(full_name.split(".")[:-1]) # Everything before the last dot.
+            ext = full_name.split(".")[-1:][0].lower() # Everything after the last dot.
+            ### First see if geopandas can handle it.
+            try:
+                geoshape: geopandas.GeoDataFrame = geopandas.read_file(file)
+                # Turn from GeoDataFrame to GeoSeries:
+                geoshape = geoshape.geometry
+                # Add it to the file list:
+                self.add_file_to_dict(file_dict, name+".pandas", geoshape)
+                continue
+            # If anything goes wrong, try to go back to the old ways:
+            except:
+                file.seek(0) # Move read cursor to 0, lets you read again
+            if ext == "zip":
+                # First check for a full shapefile set:
+                with BytesIO(file.read()) as zip_f:
+                    zip_obj = zipfile.ZipFile(zip_f)
+                    parts = zip_obj.namelist()
+                    for part_path in parts:
+                        # If it's a dir, skip it. ('parts' still contains the files in that dir)
+                        if part_path.endswith("/"):
+                            continue
+                        self.add_file_to_dict(file_dict, part_path, zip_obj.read(part_path))
+            else:
+                # Try to add whatever it is:
+                self.add_file_to_dict(file_dict, full_name, file.read())
+
+        # With everything organized in dict, start parsing them:
+        wkt_list = []
+        for key, val in file_dict.items():
+            ext = key.split(".")[-1:][0].lower()
+            # If it's a shp set. (Check first, because 'file.kml.shp' will be loaded, and
+            # the key will become 'file.kml'. The val is always a dict for shps tho):
+            if isinstance(val, type({})):
+                returned_wkt = parse_shapefile(val)
+            elif ext == "pandas":
+                # For this, val IS the geopandas object.
+                # Check if you need to reproject the wkt.
(Might be None):
+                if val.crs and val.crs != "EPSG:4326":
+                    val = val.to_crs("EPSG:4326")
+                if len(val) == 0:
+                    continue
+                elif len(val) == 1:
+                    returned_wkt = json_to_wkt(val[0].__geo_interface__)
+                else:
+                    tmp_list = [json_to_wkt(shape.__geo_interface__) for shape in val]
+                    returned_wkt = "GEOMETRYCOLLECTION ({0})".format(",".join(tmp_list))
+            # Check for each type now:
+            elif ext == "geojson":
+                returned_wkt = parse_geojson(val)
+            elif ext == "kml":
+                returned_wkt = parse_kml(val)
+            else:
+                # This *should* never get hit, but someone might add a new file-type in 'add_file_to_dict' w/out declaring it here.
+                self.errors.append({"type": "STREAM_UNKNOWN", "report": "Ignoring file with unknown tag. File: '{0}'".format(os.path.basename(key))})
+                continue
+            # If the parse function returned a json error:
+            if isinstance(returned_wkt, type({})) and "error" in returned_wkt:
+                # Give the error a better error description:
+                returned_wkt["error"]["report"] += " (Cannot load file: '{0}')".format(os.path.basename(key))
+                self.errors.append(returned_wkt["error"])
+                continue
+            else:
+                wkt_list.append(returned_wkt)
+
+        # Turn it into a single WKT:
+        full_wkt = "GEOMETRYCOLLECTION({0})".format(",".join(wkt_list))
+
+        # Bring it to json and back, to collapse any nested GEOMETRYCOLLECTIONS.
+        # It'll be in a collection if and only if there is more than one shape.
+        full_wkt = json_to_wkt(wkt.loads(full_wkt))
+        self.returned_dict = {"parsed wkt": full_wkt}
+
+
+    def getWKT(self):
+        # Only return the 'errors' key IF there are errors...
+        if self.errors != []:
+            self.returned_dict['errors'] = self.errors
+        return self.returned_dict
+
+    # Helper for organizing files into a dict, combining shps/shx, etc.
+    def add_file_to_dict(self, file_dict, full_name, file_stream):
+        ext = full_name.split(".")[-1:][0].lower() # Everything after the last dot.
+        file_name = ".".join(full_name.split(".")[:-1]) # Everything before the last dot.
+
+        # SHP'S:
+        if ext in ["shp", "shx", "dbf"]:
+            # Save shps as {"filename": {"shp": data, "shx": data, "dbf": data}, "file_2.kml": kml_data}
+            if file_name not in file_dict:
+                file_dict[file_name] = {}
+            file_dict[file_name][ext] = BytesIO(file_stream)
+        elif ext in ["pandas"]:
+            file_dict[full_name] = file_stream # Actually geopandas object for this one.
+        # BASIC FILES:
+        elif ext in ["kml", "geojson"]:
+            file_dict[full_name] = BytesIO(file_stream)
+        # Else they pass a zip again:
+        elif ext in ["zip"]:
+            self.errors.append({"type": "FILE_UNZIP", "report": "Cannot unzip double-compressed files. File: '{0}'.".format(os.path.basename(full_name))})
+        else:
+            self.errors.append({"type": "FILE_UNKNOWN", "report": "Ignoring file with unknown extension. File: '{0}'.".format(os.path.basename(full_name))})
+
+
+
+# Takes any json, and returns a list of all {"type": x, "coordinates": y} objects
+# found, ignoring anything else in the block
+def recurse_find_geojson(json_input):
+    # NOTE: geojson doesn't go through this anymore, with adding geopandas
+    # parser. Instead, make this happen AFTER shapes are loaded/transformed
+    # to geojson, to simplify EVERYTHING handed down to us.
+    if isinstance(json_input, type({})):
+        # If it's a dict, try to load the minimal required for a shape.
+ # Then recurse on every object, just incase more are nested inside: + try: + new_shape = { "type": json_input["type"], "coordinates": json_input["coordinates"] } + yield new_shape + except KeyError: + pass + for key_value_pair in json_input.items(): + yield from recurse_find_geojson(key_value_pair[1]) + # If it's a list, just loop through it: + elif isinstance(json_input, type([])): + for item in json_input: + yield from recurse_find_geojson(item) + +# Takes a json, and returns a possibly-simplified wkt_str +# Used by both parse_geojson, and parse_kml +def json_to_wkt(geojson): + geojson_list = [] + for new_shape in recurse_find_geojson(geojson): + geojson_list.append(new_shape) + + if len(geojson_list) == 0: + return {'error': {'type': 'VALUE', 'report': 'Could not find any shapes inside geojson.'}} + elif len(geojson_list) == 1: + wkt_json = geojson_list[0] + else: + wkt_json = { 'type': 'GeometryCollection', 'geometries': geojson_list } + + try: + wkt_str = wkt.dumps(wkt_json) + except (KeyError, ValueError) as e: + return {'error': {'type': 'VALUE', 'report': 'Problem converting a shape to string: {0}'.format(str(e))}} + return wkt_str + + +def parse_geojson(f): + try: + data = f.read() + geojson = json.loads(data) + except json.JSONDecodeError as e: + return {'error': {'type': 'DECODE', 'report': 'Could not parse GeoJSON: {0}'.format(str(e))}} + except KeyError as e: + return {'error': {'type': 'KEY', 'report': 'Missing expected key: {0}'.format(str(e))}} + except ValueError as e: + return {'error': {'type': 'VALUE', 'report': 'Could not parse GeoJSON: {0}'.format(str(e))}} + return json_to_wkt(geojson) + + +def parse_kml(f): + try: + kml_str = f.read() + kml_root = md.parseString(kml_str, forbid_dtd=True) + wkt_json = kml2json(kml_root) + # All these BUT the type/value errors are for the md.parseString: + # except (DefusedXmlException, DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden, NotSupportedError, TypeError, ValueError) as e: + except Exception as e: + return {'error': {'type': 'VALUE', 'report': 'Could not parse kml: {0}'.format(str(e))}} + return json_to_wkt(wkt_json) + +def parse_shapefile(fileset): + try: + reader = shapefile.Reader(**fileset) + shapes = [i.__geo_interface__ for i in reader.shapes()] + # In the sourcecode, it looks like sometimes the reader throws "Exception": + except Exception as e: + return {'error': {'type': 'VALUE', 'report': 'Could not parse shp: {0}'.format(str(e))}} + wkt_json = {'type':'GeometryCollection', 'geometries': shapes } + wkt_str = json_to_wkt(wkt_json) + return wkt_str diff --git a/asf_search/WKT/__init__.py b/asf_search/WKT/__init__.py index b3cb6ee8..81c8854e 100644 --- a/asf_search/WKT/__init__.py +++ b/asf_search/WKT/__init__.py @@ -1,2 +1,3 @@ from .validate_wkt import validate_wkt from .RepairEntry import RepairEntry +from .FilesToWKT import filesToWKT diff --git a/asf_search/__init__.py b/asf_search/__init__.py index 91e88f22..68cf208a 100644 --- a/asf_search/__init__.py +++ b/asf_search/__init__.py @@ -34,7 +34,7 @@ from .download import * from .CMR import * from .baseline import * -from .WKT import validate_wkt +from .WKT import validate_wkt, filesToWKT from .export import * REPORT_ERRORS=True diff --git a/asf_search/export/csv.py b/asf_search/export/csv.py index 575e7320..2be9b71b 100644 --- a/asf_search/export/csv.py +++ b/asf_search/export/csv.py @@ -20,7 +20,7 @@ ('doppler', ['AdditionalAttributes', ('Name', 'DOPPLER'), 'Values', 0]), ('sizeMB', ['DataGranule', 'ArchiveAndDistributionInformation', 0, 'Size']), 
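With the vendored parser wired into `asf_search.WKT` and re-exported from the package root, usage is a one-liner; a hypothetical example (the file name is illustrative):

```
# filesToWKT accepts one open file or a list of them, groups shapefile
# parts by basename, and returns {'parsed wkt': ..., 'errors': [...]}.
from asf_search import filesToWKT

with open('area_of_interest.geojson', 'rb') as f:
    parsed = filesToWKT(f).getWKT()

aoi_wkt = parsed.get('parsed wkt')
for error in parsed.get('errors', []):
    print(error['type'], error['report'])
```

One caveat on the extras this patch adds to `setup.py` below: the module imported as `shapefile` is distributed on PyPI as `pyshp`, and `zipfile` is standard library, so neither entry will resolve as written.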
('insarStackSize', ['AdditionalAttributes', ('Name', 'INSAR_STACK_SIZE'), 'Values', 0]), - ('offNadirAngle', ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0]) + ('offNadirAngle', ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0]), ] fieldnames = ( @@ -122,7 +122,7 @@ def getItem(self, p): "Sensor":p.get('sensor'), "Beam Mode":p.get('beamModeType'), "Beam Mode Description":p.get('configurationName'), - "Orbit":p.get('orbit'), + "Orbit":p.get('orbit') if not isinstance(p.get('orbit'), list) else p.get('orbit')[0], "Path Number":p.get('pathNumber'), "Frame Number":p.get('frameNumber'), "Acquisition Date":p.get('sceneDate'), diff --git a/setup.py b/setup.py index d88008cf..fe8c1ede 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,12 @@ ] extra_requirements = [ - "remotezip>=0.10.0" + "remotezip>=0.10.0", # required for remote zip functionality + "geopandas", # required for FilesToWKT functionality + "geomet", + "kml2geojson", + "shapefile", + "zipfile" ] From 460e261d5f7c30cd96d66075bf9ca09fc5957f6a Mon Sep 17 00:00:00 2001 From: kim Date: Tue, 9 Apr 2024 11:35:57 -0800 Subject: [PATCH 22/40] Exposes esa_frame in RADARSAT Product, orbit can be list in csv (for now) --- asf_search/ASFSearchOptions/ASFSearchOptions.py | 2 +- asf_search/Products/RADARSATProduct.py | 1 + asf_search/export/csv.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/asf_search/ASFSearchOptions/ASFSearchOptions.py b/asf_search/ASFSearchOptions/ASFSearchOptions.py index 8b1103e7..d8c83aff 100644 --- a/asf_search/ASFSearchOptions/ASFSearchOptions.py +++ b/asf_search/ASFSearchOptions/ASFSearchOptions.py @@ -69,7 +69,7 @@ def __str__(self): """ What to display if `print(opts)` is called. """ - return json.dumps(dict(self), indent=4) + return json.dumps(dict(self), default=str, indent=4) # Default is set to '...', since 'None' is a very valid value here def pop(self, key, default=...): diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py index 734194d1..a2958e1c 100644 --- a/asf_search/Products/RADARSATProduct.py +++ b/asf_search/Products/RADARSATProduct.py @@ -14,6 +14,7 @@ class RADARSATProduct(ASFStackableProduct): 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME) + 'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): diff --git a/asf_search/export/csv.py b/asf_search/export/csv.py index 2be9b71b..47af555e 100644 --- a/asf_search/export/csv.py +++ b/asf_search/export/csv.py @@ -122,7 +122,7 @@ def getItem(self, p): "Sensor":p.get('sensor'), "Beam Mode":p.get('beamModeType'), "Beam Mode Description":p.get('configurationName'), - "Orbit":p.get('orbit') if not isinstance(p.get('orbit'), list) else p.get('orbit')[0], + "Orbit":p.get('orbit'), "Path Number":p.get('pathNumber'), "Frame Number":p.get('frameNumber'), "Acquisition Date":p.get('sceneDate'), From 3b81b7529b36b2fb96daabdf33d025c3699aaffb Mon Sep 17 00:00:00 2001 From: kim Date: Wed, 17 Apr 2024 08:12:09 -0800 Subject: [PATCH 23/40] adds some log messages for measuring performance --- asf_search/ASFProduct.py | 33 +++++++----------------- 
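Patch 22's `default=str` tweak to `ASFSearchOptions.__str__` is easy to gloss over; it is what keeps `print(opts)` from raising once the options hold values `json.dumps` cannot serialize natively, such as `datetime` objects from the date validators. An isolated illustration:

```
import datetime
import json

opts = {'start': datetime.datetime(2024, 1, 1)}
# json.dumps(opts, indent=4) raises TypeError: Object of type datetime is not JSON serializable
print(json.dumps(opts, default=str, indent=4))  # {"start": "2024-01-01 00:00:00"}
```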
asf_search/ASFStackableProduct.py | 9 ------- asf_search/Products/AIRSARProduct.py | 10 ++----- asf_search/Products/ALOSProduct.py | 10 ++----- asf_search/Products/ARIAS1GUNWProduct.py | 10 ++----- asf_search/Products/ERSProduct.py | 10 ++----- asf_search/Products/JERSProduct.py | 10 ++----- asf_search/Products/NISARProduct.py | 10 ++----- asf_search/Products/OPERAS1Product.py | 10 ++----- asf_search/Products/RADARSATProduct.py | 10 ++----- asf_search/Products/S1BurstProduct.py | 12 +++------ asf_search/Products/S1Product.py | 10 ++----- asf_search/Products/SEASATProduct.py | 10 ++----- asf_search/Products/SIRCProduct.py | 10 ++----- asf_search/Products/SMAPProduct.py | 10 ++----- asf_search/Products/UAVSARProduct.py | 10 ++----- asf_search/search/baseline_search.py | 11 ++------ asf_search/search/search.py | 6 ++++- asf_search/search/search_generator.py | 23 ++++++++++++++--- 19 files changed, 65 insertions(+), 159 deletions(-) diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py index a6dcf18d..62b4bb91 100644 --- a/asf_search/ASFProduct.py +++ b/asf_search/ASFProduct.py @@ -41,7 +41,7 @@ class ASFProduct: def get_classname(cls): return cls.__name__ - _base_properties = { + _properties_paths = { # min viable product 'centerLat': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0], 'cast': try_parse_float}, 'centerLon': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0], 'cast': try_parse_float}, @@ -67,16 +67,14 @@ def get_classname(cls): 'sensor': {'path': [ 'Platforms', 0, 'Instruments', 0, 'ShortName'], }, } """ - _base_properties dictionary, mapping readable property names to paths and optional type casting + _properties_paths dictionary, mapping readable property names to paths and optional type casting entries are organized as such: - `PROPERTY_NAME`: The name the property should be called in `ASFProduct.properties` - `path`: the expected path in the CMR UMM json granule response as a list - `cast`: (optional): the optional type casting method - Defining `_base_properties` in subclasses allows for defining custom properties or overiding existing ones. - See `S1Product.get_property_paths()` on how subclasses are expected to - combine `ASFProduct._base_properties` with their own separately defined `_base_properties` + Defining `_properties_paths` in subclasses allows for defining custom properties or overiding existing ones. """ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @@ -263,9 +261,11 @@ def translate_product(self, item: Dict) -> Dict: umm = item.get('umm') + additionalAttributes = {attr['Name']: attr['Values'] for attr in umm['AdditionalAttributes']} + properties = { - prop: self._read_umm_property(umm, umm_mapping) - for prop, umm_mapping in self.get_property_paths().items() + prop: additionalAttributes.get(umm_mapping['path'][1][1])[0] if umm_mapping[0] == 'AdditionalAttributes' else self._read_umm_property(umm, umm_mapping) + for prop, umm_mapping in self._properties_paths } if properties.get('url') is not None: @@ -282,19 +282,6 @@ def translate_product(self, item: Dict) -> Dict: return {'geometry': geometry, 'properties': properties, 'type': 'Feature'} - # ASFProduct subclasses define extra/override param key + UMM pathing here - @staticmethod - def get_property_paths() -> Dict: - """ - Returns _base_properties of class, subclasses such as `S1Product` (or user provided subclasses) can override this to - define which properties they want in their subclass's properties dict. 
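The docstring's new contract, as one compact sketch (the property and attribute names are illustrative, not real CMR fields):

```
from asf_search import ASFProduct
from asf_search.CMR.translate import try_parse_float


class MyProduct(ASFProduct):
    # Merge the parent's mapping directly instead of overriding the old
    # get_property_paths(); 'myCustomProperty' / 'SOME_ATTRIBUTE' are made up.
    _properties_paths = {
        **ASFProduct._properties_paths,
        'myCustomProperty': {
            'path': ['AdditionalAttributes', ('Name', 'SOME_ATTRIBUTE'), 'Values', 0],
            'cast': try_parse_float,
        },
    }
```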
- - (See `S1Product.get_property_paths()` for example of combining _base_properties of multiple classes) - - :returns dictionary, {`PROPERTY_NAME`: {'path': [umm, path, to, value], 'cast (optional)': Callable_to_cast_value}, ...} - """ - return ASFProduct._base_properties - def get_sort_keys(self) -> Tuple: """ Returns tuple of primary and secondary date values used for sorting final search results @@ -376,7 +363,9 @@ def umm_get(item: Dict, *args): if item is None: return None for key in args: - if isinstance(key, int): + if isinstance(key, str): + item = item.get(key) + elif isinstance(key, int): item = item[key] if key < len(item) else None elif isinstance(key, tuple): (a, b) = key @@ -399,8 +388,6 @@ def umm_get(item: Dict, *args): break if not found: return None - else: - item = item.get(key) if item is None: return None if item in [None, 'NA', 'N/A', '']: diff --git a/asf_search/ASFStackableProduct.py b/asf_search/ASFStackableProduct.py index b85f0626..e41d274e 100644 --- a/asf_search/ASFStackableProduct.py +++ b/asf_search/ASFStackableProduct.py @@ -13,8 +13,6 @@ class ASFStackableProduct(ASFProduct): ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/ ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/ """ - _base_properties = { - } class BaselineCalcType(Enum): """ @@ -53,13 +51,6 @@ def get_stack_opts(self, opts: ASFSearchOptions = None): stack_opts.insarStackId = self.properties['insarStackId'] return stack_opts - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **ASFStackableProduct._base_properties - } - def is_valid_reference(self): # we don't stack at all if any of stack is missing insarBaseline, unlike stacking S1 products(?) 
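For readers tracing the reordered type checks in `umm_get`, the path grammar it walks is compact enough to show whole: a `str` indexes into a dict, an `int` indexes into a list, and a `(key, value)` tuple selects the first list element whose `key` matches. A toy fragment (values illustrative):

```
from asf_search import ASFProduct

umm = {
    'AdditionalAttributes': [
        {'Name': 'CENTER_LAT', 'Values': ['64.85']},
        {'Name': 'CENTER_LON', 'Values': ['-147.75']},
    ]
}

path = ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0]
value = ASFProduct.umm_get(umm, *path)  # '64.85'
```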
if 'insarBaseline' not in self.baseline: diff --git a/asf_search/Products/AIRSARProduct.py b/asf_search/Products/AIRSARProduct.py index 54c2c03c..aa1e0e8b 100644 --- a/asf_search/Products/AIRSARProduct.py +++ b/asf_search/Products/AIRSARProduct.py @@ -7,7 +7,8 @@ class AIRSARProduct(ASFProduct): """ ASF Dataset Overview Page: https://asf.alaska.edu/data-sets/sar-data-sets/airsar/ """ - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, @@ -16,10 +17,3 @@ class AIRSARProduct(ASFProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **AIRSARProduct._base_properties - } diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py index d90902f7..035b3d1f 100644 --- a/asf_search/Products/ALOSProduct.py +++ b/asf_search/Products/ALOSProduct.py @@ -10,7 +10,8 @@ class ALOSProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, 'offNadirAngle': {'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0], 'cast': try_parse_float}, @@ -31,10 +32,3 @@ def get_default_baseline_product_type() -> Union[str, None]: Returns the product type to search for when building a baseline stack. """ return PRODUCT_TYPE.L1_1 - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **ALOSProduct._base_properties - } diff --git a/asf_search/Products/ARIAS1GUNWProduct.py b/asf_search/Products/ARIAS1GUNWProduct.py index 2d88419a..030a2399 100644 --- a/asf_search/Products/ARIAS1GUNWProduct.py +++ b/asf_search/Products/ARIAS1GUNWProduct.py @@ -11,7 +11,8 @@ class ARIAS1GUNWProduct(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/derived-data-sets/sentinel-1-interferograms/ """ - _base_properties = { + _properties_paths = { + **S1Product._properties_paths, 'perpendicularBaseline': {'path': ['AdditionalAttributes', ('Name', 'PERPENDICULAR_BASELINE'), 'Values', 0], 'cast': try_parse_float}, 'orbit': {'path': ['OrbitCalculatedSpatialDomains']} } @@ -26,13 +27,6 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): self.properties['fileName'] = self.properties['fileID'] + '.' 
+ urls[0].split('.')[-1] self.properties['additionalUrls'] = [urls[1]] - @staticmethod - def get_property_paths() -> Dict: - return { - **S1Product.get_property_paths(), - **ARIAS1GUNWProduct._base_properties - } - def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: """ Build search options that can be used to find an insar stack for this product diff --git a/asf_search/Products/ERSProduct.py b/asf_search/Products/ERSProduct.py index a2dbff98..4a53d35c 100644 --- a/asf_search/Products/ERSProduct.py +++ b/asf_search/Products/ERSProduct.py @@ -11,7 +11,8 @@ class ERSProduct(ASFStackableProduct): ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/ ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]}, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, 'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0]}, @@ -23,13 +24,6 @@ class ERSProduct(ASFStackableProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **ERSProduct._base_properties - } - @staticmethod def get_default_baseline_product_type() -> Union[str, None]: """ diff --git a/asf_search/Products/JERSProduct.py b/asf_search/Products/JERSProduct.py index 1963225f..f829a760 100644 --- a/asf_search/Products/JERSProduct.py +++ b/asf_search/Products/JERSProduct.py @@ -7,7 +7,8 @@ class JERSProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/jers-1/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]}, 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, @@ -24,10 +25,3 @@ def get_default_baseline_product_type() -> Union[str, None]: Returns the product type to search for when building a baseline stack. 
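Downstream, these defaults feed the stacking helpers; a sketch of the flow (the scene name is a placeholder, not a known granule):

```
import asf_search

# Any stackable product works here; 'SCENE_NAME_GOES_HERE' is illustrative.
reference = asf_search.granule_search(['SCENE_NAME_GOES_HERE'])[0]
opts = reference.get_stack_opts()  # processingLevel comes from get_default_baseline_product_type()
stack = asf_search.search(opts=opts)
```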
""" return PRODUCT_TYPE.L0 - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **JERSProduct._base_properties - } diff --git a/asf_search/Products/NISARProduct.py b/asf_search/Products/NISARProduct.py index 279e014a..5ca3e239 100644 --- a/asf_search/Products/NISARProduct.py +++ b/asf_search/Products/NISARProduct.py @@ -10,7 +10,8 @@ class NISARProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']} } @@ -40,13 +41,6 @@ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: :return: ASFSearchOptions describing appropriate options for building a stack from this product """ return None - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **NISARProduct._base_properties - } def get_sort_keys(self): keys = super().get_sort_keys() diff --git a/asf_search/Products/OPERAS1Product.py b/asf_search/Products/OPERAS1Product.py index d205b840..95efc891 100644 --- a/asf_search/Products/OPERAS1Product.py +++ b/asf_search/Products/OPERAS1Product.py @@ -8,7 +8,8 @@ class OPERAS1Product(S1Product): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/opera/ """ - _base_properties = { + _properties_paths = { + **S1Product._properties_paths, 'centerLat': {'path': []}, # Opera products lacks these fields 'centerLon': {'path': []}, 'frameNumber': {'path': []}, @@ -46,13 +47,6 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): def get_stack_opts(self, opts: ASFSearchOptions = ASFSearchOptions()) -> ASFSearchOptions: return opts - @staticmethod - def get_property_paths() -> Dict: - return { - **S1Product.get_property_paths(), - **OPERAS1Product._base_properties - } - @staticmethod def get_default_baseline_product_type() -> None: """ diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py index a2958e1c..6d2a842a 100644 --- a/asf_search/Products/RADARSATProduct.py +++ b/asf_search/Products/RADARSATProduct.py @@ -8,7 +8,8 @@ class RADARSATProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/radarsat-1/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, @@ -20,13 +21,6 @@ class RADARSATProduct(ASFStackableProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **RADARSATProduct._base_properties - } - @staticmethod def get_default_baseline_product_type() -> Union[str, None]: """ diff --git a/asf_search/Products/S1BurstProduct.py b/asf_search/Products/S1BurstProduct.py index f4f7a249..7d35f923 100644 --- a/asf_search/Products/S1BurstProduct.py +++ b/asf_search/Products/S1BurstProduct.py @@ -17,7 +17,8 @@ class S1BurstProduct(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/ """ - 
_base_properties = { + _properties_paths = { + **S1Product._properties_paths, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]}, 'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int}, 'relativeBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_RELATIVE'), 'Values', 0], 'cast': try_parse_int}, @@ -65,14 +66,7 @@ def get_stack_opts(self, opts: ASFSearchOptions = None): stack_opts.fullBurstID = self.properties['burst']['fullBurstID'] stack_opts.polarization = [self.properties['polarization']] return stack_opts - - @staticmethod - def get_property_paths() -> Dict: - return { - **S1Product.get_property_paths(), - **S1BurstProduct._base_properties - } - + def _get_additional_filenames_and_urls(self, default_filename: str = None): # Burst XML filenames are just numbers, this makes it more indentifiable if default_filename is None: diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py index 45ea8fc3..c6f24ff5 100644 --- a/asf_search/Products/S1Product.py +++ b/asf_search/Products/S1Product.py @@ -15,7 +15,8 @@ class S1Product(ASFStackableProduct): ASF Dataset Overview Page: https://asf.alaska.edu/datasets/daac/sentinel-1/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME) 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, @@ -120,13 +121,6 @@ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: return stack_opts - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **S1Product._base_properties - } - def is_valid_reference(self) -> bool: keys = ['postPosition', 'postPositionTime', 'prePosition', 'postPositionTime'] diff --git a/asf_search/Products/SEASATProduct.py b/asf_search/Products/SEASATProduct.py index e726d756..1158fcf1 100644 --- a/asf_search/Products/SEASATProduct.py +++ b/asf_search/Products/SEASATProduct.py @@ -7,7 +7,8 @@ class SEASATProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/seasat/ """ - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'bytes': {'path': [ 'AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, @@ -15,10 +16,3 @@ class SEASATProduct(ASFProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **SEASATProduct._base_properties - } diff --git a/asf_search/Products/SIRCProduct.py b/asf_search/Products/SIRCProduct.py index e5e9ad31..e2b05df3 100644 --- a/asf_search/Products/SIRCProduct.py +++ b/asf_search/Products/SIRCProduct.py @@ -5,7 +5,8 @@ class SIRCProduct(ASFProduct): """ Dataset Documentation Page: https://eospso.nasa.gov/missions/spaceborne-imaging-radar-c """ - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', 
('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion'] }, @@ -14,10 +15,3 @@ class SIRCProduct(ASFProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **SIRCProduct._base_properties - } diff --git a/asf_search/Products/SMAPProduct.py b/asf_search/Products/SMAPProduct.py index f78f00e0..a2750032 100644 --- a/asf_search/Products/SMAPProduct.py +++ b/asf_search/Products/SMAPProduct.py @@ -7,7 +7,8 @@ class SMAPProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/ """ - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, @@ -15,10 +16,3 @@ class SMAPProduct(ASFProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **SMAPProduct._base_properties - } diff --git a/asf_search/Products/UAVSARProduct.py b/asf_search/Products/UAVSARProduct.py index 73acd812..f33a39dc 100644 --- a/asf_search/Products/UAVSARProduct.py +++ b/asf_search/Products/UAVSARProduct.py @@ -7,7 +7,8 @@ class UAVSARProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/uavsar/ """ - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, @@ -15,10 +16,3 @@ class UAVSARProduct(ASFProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **UAVSARProduct._base_properties - } diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py index 4d7da17f..77b01f7c 100644 --- a/asf_search/search/baseline_search.py +++ b/asf_search/search/baseline_search.py @@ -100,18 +100,11 @@ def _cast_to_subclass(product: ASFProduct, subclass: Type[ASFProduct]) -> ASFPro example: ``` class MyCustomClass(ASFProduct): - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'some_unique_property': {'path': ['AdditionalAttributes', 'UNIQUE_PROPERTY', ...]} } - ... 
- - @staticmethod - def get_property_paths() -> dict: - return { - **ASFProduct.get_property_paths(), - **MyCustomClass._base_properties - } # subclass as constructor customReference = reference.cast_to_subclass(MyCustomClass) diff --git a/asf_search/search/search.py b/asf_search/search/search.py index 88818012..cbee1da7 100644 --- a/asf_search/search/search.py +++ b/asf_search/search/search.py @@ -1,8 +1,9 @@ +import time from typing import Union, Sequence, Tuple from copy import copy import datetime -from asf_search import ASFSearchResults +from asf_search import ASF_LOGGER, ASFSearchResults from asf_search.ASFSearchOptions import ASFSearchOptions from asf_search.search.search_generator import search_generator @@ -97,10 +98,13 @@ def search( results = ASFSearchResults([]) # The last page will be marked as complete if results sucessful + perf = time.time() for page in search_generator(opts=opts): + ASF_LOGGER.warning(f"Page Time Elapsed {time.time() - perf}") results.extend(page) results.searchComplete = page.searchComplete results.searchOptions = page.searchOptions + perf = time.time() results.raise_if_incomplete() results.sort(key=lambda p: p.get_sort_keys(), reverse=True) diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index bc7114b3..e6d70676 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -1,4 +1,5 @@ import logging +import time from typing import Dict, Generator, Union, Sequence, Tuple, List from copy import copy from requests.exceptions import HTTPError @@ -92,7 +93,6 @@ def search_generator( queries = build_subqueries(opts) for query in queries: translated_opts = translate_opts(query) - ASF_LOGGER.warning(f"TRANSLATED PARAMS: {translated_opts}") cmr_search_after_header = "" subquery_count = 0 @@ -113,7 +113,9 @@ def search_generator( raise opts.session.headers.update({'CMR-Search-After': cmr_search_after_header}) + perf = time.time() last_page = process_page(items, maxResults, subquery_max_results, total, subquery_count, opts) + ASF_LOGGER.warning(f"Page Processing Time {time.time() - perf}") subquery_count += len(last_page) total += len(last_page) last_page.searchComplete = subquery_count == subquery_max_results or total == maxResults @@ -137,9 +139,12 @@ def search_generator( def query_cmr(session: ASFSession, url: str, translated_opts: Dict, sub_query_count: int): response = get_page(session=session, url=url, translated_opts=translated_opts) + perf = time.time() items = [as_ASFProduct(f, session=session) for f in response.json()['items']] + ASF_LOGGER.warning(f"Product Subclassing Time {time.time() - perf}") hits: int = response.json()['hits'] # total count of products given search opts - + # 9-10 per process + # 3.9-5 per process # sometimes CMR returns results with the wrong page size if len(items) != INTERNAL.CMR_PAGE_SIZE and len(items) + sub_query_count < hits: raise CMRIncompleteError(f"CMR returned page of incomplete results. 
Expected {min(INTERNAL.CMR_PAGE_SIZE, hits - sub_query_count)} results, got {len(items)}") @@ -161,6 +166,8 @@ def process_page(items: List[ASFProduct], max_results: int, subquery_max_results stop=stop_after_attempt(3), ) def get_page(session: ASFSession, url: str, translated_opts: List) -> Response: + + perf = time.time() try: response = session.post(url=url, data=translated_opts, timeout=INTERNAL.CMR_TIMEOUT) response.raise_for_status() @@ -173,6 +180,7 @@ def get_page(session: ASFSession, url: str, translated_opts: List) -> Response: except ReadTimeout as exc: raise ASFSearchError(f'Connection Error (Timeout): CMR took too long to respond. Set asf constant "CMR_TIMEOUT" to increase. ({url=}, timeout={INTERNAL.CMR_TIMEOUT})') from exc + ASF_LOGGER.warning(f"Query Time Elapsed {time.time() - perf}") return response @@ -246,6 +254,8 @@ def set_platform_alias(opts: ASFSearchOptions): opts.platform = list(set(platform_list)) +_dataset_collection_items = dataset_collections.items() + def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct: """ Returns the granule umm as the corresponding ASFProduct subclass, or ASFProduct if no equivalent is found @@ -258,18 +268,23 @@ def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct: product_type_key = _get_product_type_key(item) # if there's a direct entry in our dataset to product type dict + # perf = time.time() subclass = dataset_to_product_types.get(product_type_key) if subclass is not None: + # ASF_LOGGER.warning(f'subclass selection time {time.time() - perf}') return subclass(item, session=session) # or if the key matches one of the shortnames in any of our datasets - for dataset, collections in dataset_collections.items(): + + for dataset, collections in _dataset_collection_items: if collections.get(product_type_key) is not None: subclass = dataset_to_product_types.get(dataset) if subclass is not None: + # ASF_LOGGER.warning(f'subclass selection time {time.time() - perf}') return subclass(item, session=session) break # dataset exists, but is not in dataset_to_product_types yet - + + # ASF_LOGGER.warning(f'subclass selection time {time.time() - perf}') return ASFProduct(item, session=session) def _get_product_type_key(item: Dict) -> str: From 967ffc17aff61831716a47f63780fc97c1f25bf4 Mon Sep 17 00:00:00 2001 From: kim Date: Mon, 6 May 2024 08:34:03 -0800 Subject: [PATCH 24/40] reverts additional attribute optimization (for now) --- asf_search/ASFProduct.py | 6 +++--- asf_search/search/search_generator.py | 9 ++++----- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py index 7cd43f97..14c2e357 100644 --- a/asf_search/ASFProduct.py +++ b/asf_search/ASFProduct.py @@ -261,11 +261,11 @@ def translate_product(self, item: Dict) -> Dict: umm = item.get('umm') - additionalAttributes = {attr['Name']: attr['Values'] for attr in umm['AdditionalAttributes']} + # additionalAttributes = {attr['Name']: attr['Values'] for attr in umm['AdditionalAttributes']} properties = { - prop: additionalAttributes.get(umm_mapping['path'][1][1])[0] if umm_mapping[0] == 'AdditionalAttributes' else self._read_umm_property(umm, umm_mapping) - for prop, umm_mapping in self._properties_paths + prop: self._read_umm_property(umm, umm_mapping) + for prop, umm_mapping in self._properties_paths.items() } if properties.get('url') is not None: diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index 798df01c..f002d2f1 100644 --- a/asf_search/search/search_generator.py 
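The timing instrumentation patch 23 scattered through `search.py` and `search_generator.py` repeats one idiom: stash `time.time()`, do the work, log the delta at WARNING level. Factored out, it looks like the sketch below (`timed` is an illustrative helper, not part of the patch; `time.perf_counter()` would be the more conventional clock for interval timing):

```
import logging
import time

ASF_LOGGER = logging.getLogger('asf_search')


def timed(label, fn, *args, **kwargs):
    # Capture a start time, run the work, log elapsed wall-clock seconds.
    perf = time.time()
    result = fn(*args, **kwargs)
    ASF_LOGGER.warning(f'{label} Time Elapsed {time.time() - perf}')
    return result
```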
+++ b/asf_search/search/search_generator.py @@ -112,8 +112,8 @@ def search_generator( try: ASF_LOGGER.debug(f'SUBQUERY {subquery_idx + 1}: Fetching page {page_number}') items, subquery_max_results, cmr_search_after_header = query_cmr(opts.session, url, translated_opts, subquery_count) - except (ASFSearchError, CMRIncompleteError) as e: - message = str(e) + except (ASFSearchError, CMRIncompleteError) as exc: + message = str(exc) ASF_LOGGER.error(message) report_search_error(query, message) opts.session.headers.pop('CMR-Search-After', None) @@ -274,7 +274,6 @@ def set_platform_alias(opts: ASFSearchOptions): platform_list.append(plat) opts.platform = list(set(platform_list)) - _dataset_collection_items = dataset_collections.items() def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct: @@ -290,13 +289,13 @@ def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct: # if there's a direct entry in our dataset to product type dict # perf = time.time() - subclass = _dataset_collection_items.get(product_type_key) + subclass = dataset_collections.get(product_type_key) if subclass is not None: # ASF_LOGGER.warning(f'subclass selection time {time.time() - perf}') return subclass(item, session=session) # if the key matches one of the shortnames in any of our datasets - for dataset, collections in dataset_collections.items(): + for dataset, collections in _dataset_collection_items: if collections.get(product_type_key) is not None: subclass = dataset_to_product_types.get(dataset) if subclass is not None: From 151b1f21787c1d81b2178fbae08a5c8dbd9893fc Mon Sep 17 00:00:00 2001 From: kim Date: Mon, 6 May 2024 16:21:49 -0800 Subject: [PATCH 25/40] use dict.get() in kml --- asf_search/export/kml.py | 12 ++++++------ asf_search/search/search_generator.py | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/asf_search/export/kml.py b/asf_search/export/kml.py index 1486a1f8..c2dadcad 100644 --- a/asf_search/export/kml.py +++ b/asf_search/export/kml.py @@ -139,12 +139,12 @@ def getItem(self, p): # Helper method for getting additional fields in
    tag def metadata_fields(self, item: Dict): required = { - 'Processing type: ': item['processingTypeDisplay'], - 'Frame: ': item['frameNumber'], - 'Path: ': item['pathNumber'], - 'Orbit: ': item['orbit'], - 'Start time: ': item['startTime'], - 'End time: ': item['stopTime'], + 'Processing type: ': item.get('processingTypeDisplay'), + 'Frame: ': item.get('frameNumber'), + 'Path: ': item.get('pathNumber'), + 'Orbit: ': item.get('orbit'), + 'Start time: ': item.get('startTime'), + 'End time: ': item.get('stopTime'), } optional = {} diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index f002d2f1..b2dffe3e 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -289,7 +289,7 @@ def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct: # if there's a direct entry in our dataset to product type dict # perf = time.time() - subclass = dataset_collections.get(product_type_key) + subclass = dataset_to_product_types.get(product_type_key) if subclass is not None: # ASF_LOGGER.warning(f'subclass selection time {time.time() - perf}') return subclass(item, session=session) From 7329d34367594607656cf88a1361507e7e1f51f7 Mon Sep 17 00:00:00 2001 From: kim Date: Fri, 12 Jul 2024 16:42:29 -0800 Subject: [PATCH 26/40] adds flake8 lint workflow --- .github/workflows/lint.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 .github/workflows/lint.yml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..5422cde5 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,15 @@ +name: Lint + +on: + pull_request: + +jobs: + lint: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - uses: TrueBrain/actions-flake8@v2 + with: + flake8_version: 6.0.0 + path: asf_search From b11cc7382cd23b7e4041f720a9c06c1f79ec917c Mon Sep 17 00:00:00 2001 From: kim Date: Mon, 15 Jul 2024 11:32:50 -0800 Subject: [PATCH 27/40] flake8 compliance on non-test code, set line limit to 100 --- .github/workflows/lint.yml | 1 + asf_search/ASFProduct.py | 324 +++++++---- .../ASFSearchOptions/ASFSearchOptions.py | 20 +- asf_search/ASFSearchOptions/__init__.py | 4 +- asf_search/ASFSearchOptions/validator_map.py | 108 ++-- asf_search/ASFSearchOptions/validators.py | 140 +++-- asf_search/ASFSearchResults.py | 48 +- asf_search/ASFSession.py | 262 ++++++--- asf_search/ASFStackableProduct.py | 47 +- asf_search/CMR/MissionList.py | 11 +- asf_search/CMR/__init__.py | 16 +- asf_search/CMR/datasets.py | 94 +-- asf_search/CMR/field_map.py | 12 +- asf_search/CMR/subquery.py | 117 ++-- asf_search/CMR/translate.py | 147 +++-- asf_search/Products/AIRSARProduct.py | 27 +- asf_search/Products/ALOSProduct.py | 33 +- asf_search/Products/ARIAS1GUNWProduct.py | 67 ++- asf_search/Products/ERSProduct.py | 28 +- asf_search/Products/JERSProduct.py | 23 +- asf_search/Products/NISARProduct.py | 30 +- asf_search/Products/OPERAS1Product.py | 122 ++-- asf_search/Products/RADARSATProduct.py | 20 +- asf_search/Products/S1BurstProduct.py | 135 +++-- asf_search/Products/S1Product.py | 99 ++-- asf_search/Products/SEASATProduct.py | 19 +- asf_search/Products/SIRCProduct.py | 19 +- asf_search/Products/SMAPProduct.py | 22 +- asf_search/Products/UAVSARProduct.py | 19 +- asf_search/Products/__init__.py | 28 +- asf_search/WKT/RepairEntry.py | 2 +- asf_search/WKT/__init__.py | 4 +- asf_search/WKT/validate_wkt.py | 237 +++++--- asf_search/__init__.py | 60 +- asf_search/baseline/__init__.py | 4 +- 
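After patch 25's one-word correction, the subclass dispatch in `as_ASFProduct` settles into a two-step lookup; condensed as a sketch (`_pick_subclass` is an illustrative name, while the module-level dicts are the real ones):

```
def _pick_subclass(product_type_key: str):
    # 1) direct product-type entry in the dataset-to-product-type map
    subclass = dataset_to_product_types.get(product_type_key)
    if subclass is not None:
        return subclass
    # 2) otherwise match the key against each dataset's collection shortnames
    for dataset, collections in dataset_collections.items():
        if collections.get(product_type_key) is not None:
            return dataset_to_product_types.get(dataset, ASFProduct)
    return ASFProduct
```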
asf_search/baseline/calc.py | 184 +++--- asf_search/baseline/stack.py | 77 ++- asf_search/constants/BEAMMODE.py | 94 +-- asf_search/constants/DATASET.py | 32 +- asf_search/constants/FLIGHT_DIRECTION.py | 4 +- asf_search/constants/INSTRUMENT.py | 6 +- asf_search/constants/INTERNAL.py | 4 +- asf_search/constants/PLATFORM.py | 30 +- asf_search/constants/POLARIZATION.py | 32 +- asf_search/constants/PRODUCT_TYPE.py | 140 ++--- asf_search/constants/__init__.py | 19 +- asf_search/download/__init__.py | 4 +- asf_search/download/download.py | 76 ++- asf_search/download/file_download_type.py | 1 + asf_search/exceptions.py | 4 + asf_search/export/__init__.py | 14 +- asf_search/export/csv.py | 180 +++--- asf_search/export/export_translators.py | 29 +- asf_search/export/geojson.py | 24 +- asf_search/export/jsonlite.py | 203 ++++--- asf_search/export/jsonlite2.py | 96 ++-- asf_search/export/kml.py | 234 +++++--- asf_search/export/metalink.py | 69 +-- asf_search/health/__init__.py | 2 +- asf_search/health/health.py | 16 +- asf_search/search/__init__.py | 16 +- asf_search/search/baseline_search.py | 103 ++-- asf_search/search/campaigns.py | 39 +- asf_search/search/error_reporting.py | 43 +- asf_search/search/geo_search.py | 210 +++++-- asf_search/search/granule_search.py | 23 +- asf_search/search/product_search.py | 21 +- asf_search/search/search.py | 239 +++++--- asf_search/search/search_count.py | 108 ++-- asf_search/search/search_generator.py | 540 ++++++++++++------ 70 files changed, 3283 insertions(+), 1982 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 5422cde5..22a36ffa 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -13,3 +13,4 @@ jobs: with: flake8_version: 6.0.0 path: asf_search + max_line_length: 100 diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py index e2681ff2..aff9be49 100644 --- a/asf_search/ASFProduct.py +++ b/asf_search/ASFProduct.py @@ -27,8 +27,10 @@ class ASFProduct: - geometry: - The geometry `{coordinates: [[lon, lat] ...], 'type': Polygon}` - baseline: - - used for spatio-temporal baseline stacking, stores state vectors/ascending node time/insar baseline values when available (Not set in base ASFProduct class) - - See `S1Product` or `ALOSProduct` `get_baseline_calc_properties()` methods for implementation examples + - used for spatio-temporal baseline stacking, stores state vectors/ascending + node time/insar baseline values when available (Not set in base ASFProduct class) + - See `S1Product` or `ALOSProduct` `get_baseline_calc_properties()` + methods for implementation examples Key methods: - `download()` @@ -37,34 +39,87 @@ class ASFProduct: """ + @classmethod def get_classname(cls): return cls.__name__ _base_properties = { - # min viable product - 'centerLat': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0], 'cast': try_parse_float}, - 'centerLon': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0], 'cast': try_parse_float}, - 'stopTime': {'path': ['TemporalExtent', 'RangeDateTime', 'EndingDateTime'], 'cast': try_parse_date}, # primary search results sort key - 'fileID': {'path': ['GranuleUR']}, # secondary search results sort key - 'flightDirection': {'path': [ 'AdditionalAttributes', ('Name', 'ASCENDING_DESCENDING'), 'Values', 0]}, - 'pathNumber': {'path': ['AdditionalAttributes', ('Name', 'PATH_NUMBER'), 'Values', 0], 'cast': try_parse_int}, - 'processingLevel': {'path': [ 'AdditionalAttributes', ('Name', 'PROCESSING_TYPE'), 'Values', 0]}, - - # 
commonly used - 'url': {'path': [ 'RelatedUrls', ('Type', 'GET DATA'), 'URL']}, - 'startTime': {'path': [ 'TemporalExtent', 'RangeDateTime', 'BeginningDateTime'], 'cast': try_parse_date}, - 'sceneName': {'path': [ 'DataGranule', 'Identifiers', ('IdentifierType', 'ProducerGranuleId'), 'Identifier']}, - 'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]}, - 'platform': {'path': [ 'AdditionalAttributes', ('Name', 'ASF_PLATFORM'), 'Values', 0]}, - 'bytes': {'path': [ 'AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, - 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, - 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, # overloaded by S1, ALOS, and ERS - 'granuleType': {'path': [ 'AdditionalAttributes', ('Name', 'GRANULE_TYPE'), 'Values', 0]}, - 'orbit': {'path': [ 'OrbitCalculatedSpatialDomains', 0, 'OrbitNumber'], 'cast': try_parse_int}, - 'polarization': {'path': [ 'AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values', 0]}, - 'processingDate': {'path': [ 'DataGranule', 'ProductionDateTime'], 'cast': try_parse_date}, - 'sensor': {'path': [ 'Platforms', 0, 'Instruments', 0, 'ShortName'], }, + # min viable product + "centerLat": { + "path": ["AdditionalAttributes", ("Name", "CENTER_LAT"), "Values", 0], + "cast": try_parse_float, + }, + "centerLon": { + "path": ["AdditionalAttributes", ("Name", "CENTER_LON"), "Values", 0], + "cast": try_parse_float, + }, + "stopTime": { + "path": ["TemporalExtent", "RangeDateTime", "EndingDateTime"], + "cast": try_parse_date, + }, # primary search results sort key + "fileID": {"path": ["GranuleUR"]}, # secondary search results sort key + "flightDirection": { + "path": [ + "AdditionalAttributes", + ("Name", "ASCENDING_DESCENDING"), + "Values", + 0, + ] + }, + "pathNumber": { + "path": ["AdditionalAttributes", ("Name", "PATH_NUMBER"), "Values", 0], + "cast": try_parse_int, + }, + "processingLevel": { + "path": ["AdditionalAttributes", ("Name", "PROCESSING_TYPE"), "Values", 0] + }, + # commonly used + "url": {"path": ["RelatedUrls", ("Type", "GET DATA"), "URL"]}, + "startTime": { + "path": ["TemporalExtent", "RangeDateTime", "BeginningDateTime"], + "cast": try_parse_date, + }, + "sceneName": { + "path": [ + "DataGranule", + "Identifiers", + ("IdentifierType", "ProducerGranuleId"), + "Identifier", + ] + }, + "browse": { + "path": ["RelatedUrls", ("Type", [("GET RELATED VISUALIZATION", "URL")])] + }, + "platform": { + "path": ["AdditionalAttributes", ("Name", "ASF_PLATFORM"), "Values", 0] + }, + "bytes": { + "path": ["AdditionalAttributes", ("Name", "BYTES"), "Values", 0], + "cast": try_round_float, + }, + "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, + "frameNumber": { + "path": ["AdditionalAttributes", ("Name", "CENTER_ESA_FRAME"), "Values", 0], + "cast": try_parse_int, + }, # overloaded by S1, ALOS, and ERS + "granuleType": { + "path": ["AdditionalAttributes", ("Name", "GRANULE_TYPE"), "Values", 0] + }, + "orbit": { + "path": ["OrbitCalculatedSpatialDomains", 0, "OrbitNumber"], + "cast": try_parse_int, + }, + "polarization": { + "path": ["AdditionalAttributes", ("Name", "POLARIZATION"), "Values", 0] + }, + "processingDate": { + "path": ["DataGranule", "ProductionDateTime"], + "cast": try_parse_date, + }, + "sensor": { + "path": ["Platforms", 0, "Instruments", 0, "ShortName"], + }, } """ _base_properties dictionary, mapping readable property names to paths and 
optional type casting @@ -74,19 +129,20 @@ def get_classname(cls): - `path`: the expected path in the CMR UMM json granule response as a list - `cast`: (optional): the optional type casting method - Defining `_base_properties` in subclasses allows for defining custom properties or overiding existing ones. + Defining `_base_properties` in subclasses allows for + defining custom properties or overiding existing ones. See `S1Product.get_property_paths()` on how subclasses are expected to combine `ASFProduct._base_properties` with their own separately defined `_base_properties` """ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): - self.meta = args.get('meta') - self.umm = args.get('umm') + self.meta = args.get("meta") + self.umm = args.get("umm") translated = self.translate_product(args) - self.properties = translated['properties'] - self.geometry = translated['geometry'] + self.properties = translated["properties"] + self.geometry = translated["geometry"] self.baseline = None self.session = session @@ -94,14 +150,23 @@ def __str__(self): return json.dumps(self.geojson(), indent=2, sort_keys=True) def geojson(self) -> Dict: - """Returns ASFProduct object as a geojson formatted dictionary, with `type`, `geometry`, and `properties` keys""" + """ + Returns ASFProduct object as a geojson formatted dictionary + with `type`, `geometry`, and `properties` keys + """ return { - 'type': 'Feature', - 'geometry': self.geometry, - 'properties': self.properties + "type": "Feature", + "geometry": self.geometry, + "properties": self.properties, } - def download(self, path: str, filename: str = None, session: ASFSession = None, fileType = FileDownloadType.DEFAULT_FILE) -> None: + def download( + self, + path: str, + filename: str = None, + session: ASFSession = None, + fileType=FileDownloadType.DEFAULT_FILE, + ) -> None: """ Downloads this product to the specified path and optional filename. @@ -112,15 +177,18 @@ def download(self, path: str, filename: str = None, session: ASFSession = None, :return: None """ - default_filename = self.properties['fileName'] + default_filename = self.properties["fileName"] if filename is not None: multiple_files = ( - (fileType == FileDownloadType.ADDITIONAL_FILES and len(self.properties['additionalUrls']) > 1) - or fileType == FileDownloadType.ALL_FILES - ) + fileType == FileDownloadType.ADDITIONAL_FILES + and len(self.properties["additionalUrls"]) > 1 + ) or fileType == FileDownloadType.ALL_FILES if multiple_files: - warnings.warn(f"Attempting to download multiple files for product, ignoring user provided filename argument \"{filename}\", using default.") + warnings.warn( + 'Attempting to download multiple files for product, ' + f'ignoring user provided filename argument "{filename}", using default.' 
+ ) else: default_filename = filename @@ -130,23 +198,29 @@ def download(self, path: str, filename: str = None, session: ASFSession = None, urls = [] if fileType == FileDownloadType.DEFAULT_FILE: - urls.append((default_filename, self.properties['url'])) + urls.append((default_filename, self.properties["url"])) elif fileType == FileDownloadType.ADDITIONAL_FILES: urls.extend(self._get_additional_filenames_and_urls(default_filename)) elif fileType == FileDownloadType.ALL_FILES: - urls.append((default_filename, self.properties['url'])) + urls.append((default_filename, self.properties["url"])) urls.extend(self._get_additional_filenames_and_urls(default_filename)) else: - raise ValueError("Invalid FileDownloadType provided, the valid types are 'DEFAULT_FILE', 'ADDITIONAL_FILES', and 'ALL_FILES'") + raise ValueError( + 'Invalid FileDownloadType provided, ' + 'the valid types are "DEFAULT_FILE", "ADDITIONAL_FILES", and "ALL_FILES"' + ) for filename, url in urls: download_url(url=url, path=path, filename=filename, session=session) def _get_additional_filenames_and_urls( - self, - default_filename: str = None # for subclasses without fileName in url (see S1BurstProduct implementation) - ) -> List[Tuple[str, str]]: - return [(self._parse_filename_from_url(url), url) for url in self.properties.get('additionalUrls', [])] + self, + default_filename: str = None, # for subclasses without fileName in url (see S1BurstProduct implementation) # noqa F401 + ) -> List[Tuple[str, str]]: + return [ + (self._parse_filename_from_url(url), url) + for url in self.properties.get("additionalUrls", []) + ] def _parse_filename_from_url(self, url: str) -> str: file_path = os.path.split(parse.urlparse(url).path) @@ -154,17 +228,22 @@ def _parse_filename_from_url(self, url: str) -> str: return filename def stack( - self, - opts: ASFSearchOptions = None, - useSubclass: Type['ASFProduct'] = None + self, opts: ASFSearchOptions = None, useSubclass: Type["ASFProduct"] = None ) -> ASFSearchResults: """ Builds a baseline stack from this product. - - :param opts: An ASFSearchOptions object describing the search parameters to be used. Search parameters specified outside this object will override in event of a conflict. - :param ASFProductSubclass: An ASFProduct subclass constructor. - - :return: ASFSearchResults containing the stack, with the addition of baseline values (temporal, perpendicular) attached to each ASFProduct. + Parameters + ---------- + opts: + An ASFSearchOptions object describing the search parameters to be used. + Search parameters specified outside this object will override in event of a conflict. + ASFProductSubclass: An ASFProduct subclass constructor to cast results to + + Returns + ---------- + asf_search.ASFSearchResults + containing the stack, with the addition of baseline values + (temporal, perpendicular) attached to each ASFProduct. 
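For reference, a short usage sketch of the stacking flow described in this docstring (the granule name is hypothetical, and the `temporalBaseline`/`perpendicularBaseline` property names are assumed from the baseline values mentioned above):

    import asf_search as asf

    results = asf.granule_search(['S1A_IW_SLC__1SDV_...'])  # hypothetical granule name
    reference = results[0]
    stack = reference.stack()
    for product in stack:
        # assumed property names for the attached baseline values
        print(product.properties['temporalBaseline'],
              product.properties['perpendicularBaseline'])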
""" from .search.baseline_search import stack_from_product @@ -177,39 +256,47 @@ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: """ Build search options that can be used to find an insar stack for this product - :return: ASFSearchOptions describing appropriate options for building a stack from this product + :return: ASFSearchOptions describing appropriate options + for building a stack from this product """ return None - def _get_access_urls(self, url_types: List[str] = ['GET DATA', 'EXTENDED METADATA']) -> List[str]: + def _get_access_urls( + self, url_types: List[str] = ["GET DATA", "EXTENDED METADATA"] + ) -> List[str]: accessUrls = [] for url_type in url_types: - if urls := self.umm_get(self.umm, 'RelatedUrls', ('Type', [(url_type, 'URL')]), 0): + if urls := self.umm_get( + self.umm, "RelatedUrls", ("Type", [(url_type, "URL")]), 0 + ): accessUrls.extend(urls) return sorted(list(set(accessUrls))) - + def _get_additional_urls(self) -> List[str]: - accessUrls = self._get_access_urls(['GET DATA', 'EXTENDED METADATA']) + accessUrls = self._get_access_urls(["GET DATA", "EXTENDED METADATA"]) return [ - url for url in accessUrls if not url.endswith('.md5') - and not url.startswith('s3://') - and 's3credentials' not in url - and not url.endswith('.png') - and url != self.properties['url'] + url + for url in accessUrls + if not url.endswith(".md5") + and not url.startswith("s3://") + and "s3credentials" not in url + and not url.endswith(".png") + and url != self.properties["url"] ] - - def _get_s3_urls(self) -> List[str]: - s3_urls = self._get_access_urls(['GET DATA', 'EXTENDED METADATA', 'GET DATA VIA DIRECT ACCESS']) - return [url for url in s3_urls if url.startswith('s3://')] + def _get_s3_urls(self) -> List[str]: + s3_urls = self._get_access_urls( + ["GET DATA", "EXTENDED METADATA", "GET DATA VIA DIRECT ACCESS"] + ) + return [url for url in s3_urls if url.startswith("s3://")] def centroid(self) -> Point: """ Finds the centroid of a product """ - coords = mapping(shape(self.geometry))['coordinates'][0] + coords = mapping(shape(self.geometry))["coordinates"][0] lons = [p[0] for p in coords] if max(lons) - min(lons) > 180: unwrapped_coords = [a if a[0] > 0 else [a[0] + 360, a[1]] for a in coords] @@ -218,58 +305,62 @@ def centroid(self) -> Point: return Polygon(unwrapped_coords).centroid - def remotezip(self, session: ASFSession) -> 'RemoteZip': - """Returns a RemoteZip object which can be used to download a part of an ASFProduct's zip archive. - (See example in examples/5-Download.ipynb) - + def remotezip(self, session: ASFSession) -> "RemoteZip": # type: ignore # noqa: F821 + """Returns a RemoteZip object which can be used to download + a part of an ASFProduct's zip archive. 
(See example in examples/5-Download.ipynb) + requires installing optional dependencies via pip or conda to use the `remotezip` package: - + `python3 -m pip install asf-search[extras]` :param session: an authenticated ASFSession """ from .download.download import remotezip - return remotezip(self.properties['url'], session=session) + return remotezip(self.properties["url"], session=session) def _read_umm_property(self, umm: Dict, mapping: Dict) -> Any: - value = self.umm_get(umm, *mapping['path']) - if mapping.get('cast') is None: + value = self.umm_get(umm, *mapping["path"]) + if mapping.get("cast") is None: return value - return self.umm_cast(mapping['cast'], value) + return self.umm_cast(mapping["cast"], value) def translate_product(self, item: Dict) -> Dict: """ Generates `properties` and `geometry` from the CMR UMM response """ try: - coordinates = item['umm']['SpatialExtent']['HorizontalSpatialDomain']['Geometry']['GPolygons'][0]['Boundary']['Points'] - coordinates = [[c['Longitude'], c['Latitude']] for c in coordinates] - geometry = {'coordinates': [coordinates], 'type': 'Polygon'} + coordinates = item["umm"]["SpatialExtent"]["HorizontalSpatialDomain"][ + "Geometry" + ]["GPolygons"][0]["Boundary"]["Points"] + coordinates = [[c["Longitude"], c["Latitude"]] for c in coordinates] + geometry = {"coordinates": [coordinates], "type": "Polygon"} except KeyError: - geometry = {'coordinates': None, 'type': 'Polygon'} + geometry = {"coordinates": None, "type": "Polygon"} - umm = item.get('umm') + umm = item.get("umm") properties = { prop: self._read_umm_property(umm, umm_mapping) for prop, umm_mapping in self.get_property_paths().items() } - if properties.get('url') is not None: - properties['fileName'] = properties['url'].split('/')[-1] + if properties.get("url") is not None: + properties["fileName"] = properties["url"].split("/")[-1] else: - properties['fileName'] = None + properties["fileName"] = None # Fallbacks - if properties.get('beamModeType') is None: - properties['beamModeType'] = self.umm_get(umm, 'AdditionalAttributes', ('Name', 'BEAM_MODE'), 'Values', 0) + if properties.get("beamModeType") is None: + properties["beamModeType"] = self.umm_get( + umm, "AdditionalAttributes", ("Name", "BEAM_MODE"), "Values", 0 + ) - if properties.get('platform') is None: - properties['platform'] = self.umm_get(umm, 'Platforms', 0, 'ShortName') + if properties.get("platform") is None: + properties["platform"] = self.umm_get(umm, "Platforms", 0, "ShortName") - return {'geometry': geometry, 'properties': properties, 'type': 'Feature'} + return {"geometry": geometry, "properties": properties, "type": "Feature"} # ASFProduct subclasses define extra/override param key + UMM pathing here @staticmethod @@ -279,8 +370,11 @@ def get_property_paths() -> Dict: define which properties they want in their subclass's properties dict. 
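To illustrate the pattern this docstring describes, a hypothetical subclass would layer its own `_base_properties` over the parent's (the CMR attribute name here is illustrative):

    class MyProduct(ASFProduct):
        _base_properties = {
            'beamModeType': {
                'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]
            },
        }

        @staticmethod
        def get_property_paths() -> Dict:
            # merge parent paths first so subclass entries can override them
            return {
                **ASFProduct.get_property_paths(),
                **MyProduct._base_properties,
            }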
        (See `S1Product.get_property_paths()` for example of combining _base_properties of multiple classes)
-
-        :returns dictionary, {`PROPERTY_NAME`: {'path': [umm, path, to, value], 'cast (optional)': Callable_to_cast_value}, ...}
+
+        Returns
+        ----------
+        :dict
+        {`PROPERTY_NAME`: {'path': [umm, path, to, value], 'cast (optional)': Callable_to_cast_value}, ...} # noqa F401
         """
         return ASFProduct._base_properties

@@ -291,25 +385,25 @@ def get_sort_keys(self) -> Tuple[str, str]:
         """
         # `sort()` will raise an error when comparing `NoneType`,
         # using self._read_property() to wrap standard `dict.get()` for possible `None` values
-        primary_key = self._read_property(key='stopTime', default='')
+        primary_key = self._read_property(key="stopTime", default="")
         secondary_key = self._read_property(
-            key='fileID',
-            default=self._read_property('sceneName', '')
+            key="fileID", default=self._read_property("sceneName", "")
         )
-
+
         return (primary_key, secondary_key)
-
+
     def _read_property(self, key: str, default: Any = None) -> Any:
         """
         Helper method wraps `properties.get()`.
-        Since a property can be `None`, if the key exists `dict.get('key', 'default')` will never return the default
+        Since a property can be `None`, if the key exists, `dict.get('key', 'default')`
+        will never return the default
         """
         output = default
-        if (value:=self.properties.get(key)) is not None:
+        if (value := self.properties.get(key)) is not None:
             output = value
-
+
         return output
-
+
     @final
     @staticmethod
     def umm_get(item: Dict, *args):
@@ -340,9 +434,11 @@ def umm_get(item: Dict, *args):
             result: 'VV'
         ```

-        - `'AdditionalAttributes'` acts like item['AdditionalAttributes'], which is a list of dictionaries
+        - `'AdditionalAttributes'` acts like item['AdditionalAttributes'],
+        which is a list of dictionaries

-        - Since `AdditionalAttributes` is a LIST of dictionaries, we search for a dict with the key value pair,
+        - Since `AdditionalAttributes` is a LIST of dictionaries,
+        we search for a dict with the key value pair,
         `('Name', 'POLARIZATION')`

         - If found, we try to access that dictionary's `Values` key
@@ -373,11 +469,14 @@ def umm_get(item: Dict, *args):

         ---
         ADVANCED:
-        Sometimes there are multiple dictionaries in a list that have the same key value pair we're searching for
-        (See `OPERAS1Product` umm under `RelatedUrls`). This means we can miss values since we're only grabbing the first match
-        depending on how the umm is organized. There is a way to get ALL data that matches our key value criteria.
+        Sometimes there are multiple dictionaries in a list that have
+        the same key value pair we're searching for (See `OPERAS1Product` umm under `RelatedUrls`).
+        This means we can miss values since we're only grabbing the first match
+        depending on how the umm is organized.
+        There is a way to get ALL data that matches our key value criteria.
+ + Example: "I need ALL `URL` values for dictionaries in `RelatedUrls` + where `Type` is `GET DATA`" (See in use in `OPERAS1Product` class) ``` 'RelatedUrls', ('Type', [('GET DATA', 'URL')]), 0 ``` @@ -412,7 +511,7 @@ def umm_get(item: Dict, *args): item = item.get(key) if item is None: return None - if item in [None, 'NA', 'N/A', '']: + if item in [None, "NA", "N/A", ""]: item = None return item @@ -428,7 +527,8 @@ def umm_cast(f, v): @staticmethod def _is_subclass(item: Dict) -> bool: """ - Used to determine which subclass to use for specific edge-cases when parsing results in search methods + Used to determine which subclass to use for specific + edge-cases when parsing results in search methods (Currently implemented for ARIA and OPERA subclasses). params: diff --git a/asf_search/ASFSearchOptions/ASFSearchOptions.py b/asf_search/ASFSearchOptions/ASFSearchOptions.py index a2d3d9d3..d67aceaf 100644 --- a/asf_search/ASFSearchOptions/ASFSearchOptions.py +++ b/asf_search/ASFSearchOptions/ASFSearchOptions.py @@ -5,25 +5,28 @@ from .config import config from asf_search import ASF_LOGGER + class ASFSearchOptions: def __init__(self, **kwargs): """ - Initialize the object, creating the list of attributes based on the contents of validator_map, and assign them based on kwargs + Initialize the object, creating the list of attributes + based on the contents of validator_map, and assign them based on kwargs :param kwargs: any search options to be set immediately """ # init the built in attrs: for key in validator_map: self.__setattr__(key, None) - + # Apply any parameters passed in: for key, value in kwargs.items(): self.__setattr__(key, value) def __setattr__(self, key, value): """ - Set a search option, restricting to the keys in validator_map only, and applying validation to the value before setting - + Set a search option, restricting to the keys in validator_map only, + and applying validation to the value before setting + :param key: the name of the option to be set :param value: the value to which to set the named option """ @@ -105,7 +108,8 @@ def reset_search(self): def merge_args(self, **kwargs) -> None: """ - Merges all keyword args into this ASFSearchOptions object. Emits a warning for any options that are over-written by the operation. + Merges all keyword args into this ASFSearchOptions object. + Emits a warning for any options that are over-written by the operation. 
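A short sketch of the merge behavior described above (option values are hypothetical):

    opts = ASFSearchOptions(maxResults=10)
    opts.merge_args(maxResults=250, platform='SENTINEL-1')
    # warns that maxResults 10 was overwritten by 250;
    # platform was still at its default, so it is set without a warning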
:param kwargs: The search options to merge into the object :return: None @@ -113,7 +117,11 @@ def merge_args(self, **kwargs) -> None: for key in kwargs: # Spit out warning if the value is something other than the default: if not self._is_val_default(key): - msg = f'While merging search options, existing option {key}:{getattr(self, key, None)} overwritten by kwarg with value {kwargs[key]}' + msg = ( + 'While merging search options, ' + f'existing option {key}:{getattr(self, key, None)} ' + f'overwritten by kwarg with value {kwargs[key]}' + ) ASF_LOGGER.warning(msg) warnings.warn(msg) self.__setattr__(key, kwargs[key]) diff --git a/asf_search/ASFSearchOptions/__init__.py b/asf_search/ASFSearchOptions/__init__.py index bd19b6d0..a41f85ff 100644 --- a/asf_search/ASFSearchOptions/__init__.py +++ b/asf_search/ASFSearchOptions/__init__.py @@ -1,2 +1,2 @@ -from .ASFSearchOptions import ASFSearchOptions -from .validators import * +from .ASFSearchOptions import ASFSearchOptions # noqa F401 +from .validators import * # noqa F401 F403 diff --git a/asf_search/ASFSearchOptions/validator_map.py b/asf_search/ASFSearchOptions/validator_map.py index 604142ab..b44bf8ca 100644 --- a/asf_search/ASFSearchOptions/validator_map.py +++ b/asf_search/ASFSearchOptions/validator_map.py @@ -1,73 +1,81 @@ from asf_search import ASF_LOGGER from .validators import ( - parse_string, parse_float, parse_wkt, parse_date, - parse_string_list, parse_int_list, parse_int_or_range_list, - parse_float_or_range_list, parse_cmr_keywords_list, - parse_session + parse_string, + parse_float, + parse_wkt, + parse_date, + parse_string_list, + parse_int_list, + parse_int_or_range_list, + parse_float_or_range_list, + parse_cmr_keywords_list, + parse_session, ) def validate(key, value): if key not in validator_map: - error_msg = f"Key '{key}' is not a valid search option." + error_msg = f'Key "{key}" is not a valid search option.' 
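For illustration, a lookup through `validate()` with a key that differs only in case follows the error handling shown here:

    validate('Platform', 'SENTINEL-1')
    # KeyError: Key "Platform" is not a valid search option. (Did you mean "platform"?)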
# See if they just missed up case sensitivity: for valid_key in validator_map: if key.lower() == valid_key.lower(): - error_msg += f" (Did you mean '{valid_key}'?)" + error_msg += f' (Did you mean "{valid_key}"?)' break ASF_LOGGER.error(error_msg) raise KeyError(error_msg) try: return validator_map[key](value) except ValueError as exc: - ASF_LOGGER.exception(f"Failed to parse item in ASFSearchOptions: {key=} {value=} {exc=}") + ASF_LOGGER.exception( + f'Failed to parse item in ASFSearchOptions: {key=} {value=} {exc=}' + ) raise + validator_map = { # Search parameters Parser - 'maxResults': int, - 'absoluteOrbit': parse_int_or_range_list, - 'asfFrame': parse_int_or_range_list, - 'beamMode': parse_string_list, - 'beamSwath': parse_string_list, - 'campaign': parse_string, - 'maxDoppler': parse_float, - 'minDoppler': parse_float, - 'maxFaradayRotation': parse_float, - 'minFaradayRotation': parse_float, - 'flightDirection': parse_string, - 'flightLine': parse_string, - 'frame': parse_int_or_range_list, - 'granule_list': parse_string_list, - 'product_list': parse_string_list, - 'intersectsWith': parse_wkt, - 'lookDirection': parse_string, - 'offNadirAngle': parse_float_or_range_list, - 'platform': parse_string_list, - 'polarization': parse_string_list, - 'processingLevel': parse_string_list, - 'relativeOrbit': parse_int_or_range_list, - 'processingDate': parse_date, - 'start': parse_date, - 'end': parse_date, - 'season': parse_int_list, - 'groupID': parse_string_list, - 'insarStackId': parse_string, - 'instrument': parse_string, - 'collections': parse_string_list, - 'shortName': parse_string_list, - 'temporalBaselineDays': parse_string_list, - 'operaBurstID': parse_string_list, - 'absoluteBurstID': parse_int_list, - 'relativeBurstID': parse_int_list, - 'fullBurstID': parse_string_list, - 'dataset': parse_string_list, - 'cmr_keywords': parse_cmr_keywords_list, - + 'maxResults': int, + 'absoluteOrbit': parse_int_or_range_list, + 'asfFrame': parse_int_or_range_list, + 'beamMode': parse_string_list, + 'beamSwath': parse_string_list, + 'campaign': parse_string, + 'maxDoppler': parse_float, + 'minDoppler': parse_float, + 'maxFaradayRotation': parse_float, + 'minFaradayRotation': parse_float, + 'flightDirection': parse_string, + 'flightLine': parse_string, + 'frame': parse_int_or_range_list, + 'granule_list': parse_string_list, + 'product_list': parse_string_list, + 'intersectsWith': parse_wkt, + 'lookDirection': parse_string, + 'offNadirAngle': parse_float_or_range_list, + 'platform': parse_string_list, + 'polarization': parse_string_list, + 'processingLevel': parse_string_list, + 'relativeOrbit': parse_int_or_range_list, + 'processingDate': parse_date, + 'start': parse_date, + 'end': parse_date, + 'season': parse_int_list, + 'groupID': parse_string_list, + 'insarStackId': parse_string, + 'instrument': parse_string, + 'collections': parse_string_list, + 'shortName': parse_string_list, + 'temporalBaselineDays': parse_string_list, + 'operaBurstID': parse_string_list, + 'absoluteBurstID': parse_int_list, + 'relativeBurstID': parse_int_list, + 'fullBurstID': parse_string_list, + 'dataset': parse_string_list, + 'cmr_keywords': parse_cmr_keywords_list, # Config parameters Parser - 'session': parse_session, - 'host': parse_string, - 'provider': parse_string, - 'collectionAlias': bool, + 'session': parse_session, + 'host': parse_string, + 'provider': parse_string, + 'collectionAlias': bool, } diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py index 
b1a30a4e..5d23044f 100644 --- a/asf_search/ASFSearchOptions/validators.py +++ b/asf_search/ASFSearchOptions/validators.py @@ -7,9 +7,9 @@ import math from shapely import wkt, errors - number = TypeVar('number', int, float) + def parse_string(value: str) -> str: """ Base string validator. Maybe silly, but we can also ensure any constraints needed in the future. @@ -18,11 +18,13 @@ def parse_string(value: str) -> str: """ # Convert to string first, so length is checked against only str types: try: - value = f'{value}' - except ValueError as exc: # If this happens, printing v's value would fail too... - raise ValueError(f"Invalid string: Can't cast type '{type(value)}' to string.") from exc + value = f"{value}" + except ValueError as exc: # If this happens, printing v's value would fail too... + raise ValueError( + f"Invalid string: Can't cast type '{type(value)}' to string." + ) from exc if len(value) == 0: - raise ValueError(f'Invalid string: Empty.') + raise ValueError('Invalid string: Empty.') return value @@ -35,9 +37,9 @@ def parse_float(value: float) -> float: try: value = float(value) except ValueError as exc: - raise ValueError(f'Invalid float: {value}') from exc + raise ValueError(f"Invalid float: {value}") from exc if math.isinf(value): - raise ValueError(f'Float values must be finite: got {value}') + raise ValueError(f"Float values must be finite: got {value}") return value @@ -50,43 +52,64 @@ def parse_date(value: Union[str, datetime]) -> Union[datetime, str]: """ if isinstance(value, datetime): return _to_utc(value) - + date = dateparser.parse(str(value)) if date is None: raise ValueError(f"Invalid date: '{value}'.") - - return _to_utc(date).strftime('%Y-%m-%dT%H:%M:%SZ') + + return _to_utc(date).strftime("%Y-%m-%dT%H:%M:%SZ") + def _to_utc(date: datetime): if date.tzinfo is None: date = date.replace(tzinfo=timezone.utc) return date - -def parse_range(value: Tuple[number, number], h: Callable[[number], number]) -> Tuple[number, number]: + + +def parse_range( + value: Tuple[number, number], h: Callable[[number], number] +) -> Tuple[number, number]: """ - Base range validator. For our purposes, a range is a tuple with exactly two numeric elements (a, b), requiring a <= b. - :param value: The range to be validated. Examples: (3, 5), (1.1, 12.3) - :param h: The validator function to apply to each individual value - :return: Validated tuple representing the range + Base range validator. For our purposes, a range is a tuple + with exactly two numeric elements (a, b), requiring a <= b. + + Parameters + ---------- + value: The range to be validated. 
Examples: (3, 5), (1.1, 12.3) + h: The validator function to apply to each individual value + + Returns + ---------- + Validated tuple representing the range """ if isinstance(value, tuple): if len(value) < 2: - raise ValueError(f'Not enough values in min/max tuple: {value}') + raise ValueError(f"Not enough values in min/max tuple: {value}") if len(value) > 2: - raise ValueError(f'Too many values in min/max tuple: {value}') + raise ValueError(f"Too many values in min/max tuple: {value}") value = (h(value[0]), h(value[1])) if math.isinf(value[0]) or math.isnan(value[0]): - raise ValueError(f'Expected finite numeric min in min/max tuple, got {value[0]}: {value}') + raise ValueError( + f"Expected finite numeric min in min/max tuple, got {value[0]}: {value}" + ) if math.isinf(value[1]) or math.isnan(value[1]): - raise ValueError(f'Expected finite numeric max in min/max tuple, got {value[1]}: {value}') + raise ValueError( + f"Expected finite numeric max in min/max tuple, got {value[1]}: {value}" + ) if value[0] > value[1]: - raise ValueError(f'Min must be less than max when using min/max tuples to search: {value}') + raise ValueError( + f"Min must be less than max when using min/max tuples to search: {value}" + ) return value - raise ValueError(f'Invalid range. Expected 2-value numeric tuple, got {type(value)}: {value}') + raise ValueError( + f"Invalid range. Expected 2-value numeric tuple, got {type(value)}: {value}" + ) # Parse and validate a date range: "1991-10-01T00:00:00Z,1991-10-02T00:00:00Z" -def parse_date_range(value: Tuple[Union[str, datetime], Union[str, datetime]]) -> Tuple[datetime, datetime]: +def parse_date_range( + value: Tuple[Union[str, datetime], Union[str, datetime]], +) -> Tuple[datetime, datetime]: return parse_range(value, parse_date) @@ -100,31 +123,47 @@ def parse_float_range(value: Tuple[float, float]) -> Tuple[float, float]: return parse_range(value, float) -# Parse and validate an iterable of values, using h() to validate each value: "a,b,c", "1,2,3", "1.1,2.3" +# Parse and validate an iterable of values, using h() to validate each value: +# "a,b,c", "1,2,3", "1.1,2.3" def parse_list(value: Sequence, h) -> List: if not isinstance(value, Sequence) or isinstance(value, str): value = [value] try: return [h(a) for a in value] except ValueError as exc: - raise ValueError(f'Invalid {h.__name__} list: {exc}') from exc + raise ValueError(f"Invalid {h.__name__} list: {exc}") from exc + def parse_cmr_keywords_list(value: Sequence[Union[Dict, Sequence]]): - if not isinstance(value, Sequence) or (len(value) == 2 and isinstance(value[0], str)): # in case we're passed single key value pair as sequence + if not isinstance(value, Sequence) or ( + len(value) == 2 and isinstance(value[0], str) + ): # in case we're passed single key value pair as sequence value = [value] - + for idx, item in enumerate(value): if not isinstance(item, tuple) and not isinstance(item, Sequence): - raise ValueError(f"Expected item in cmr_keywords list index {idx} to be tuple pair, got value {item} of type {type(item)}") + raise ValueError( + f'Expected item in cmr_keywords list index {idx} to be tuple pair, ' + f'got value {item} of type {type(item)}' + ) if len(item) != 2: - raise ValueError(f"Expected item in cmr_keywords list index {idx} to be of length 2, got value {item} of length {len(item)}") - + raise ValueError( + f'Expected item in cmr_keywords list index {idx} to be of length 2, ' + f'got value {item} of length {len(item)}' + ) + search_key, search_value = item if not isinstance(search_key, str) 
or not isinstance(search_value, str): - raise ValueError(f"Expected tuple pair of types: \"{type(str)}, {type(str)}\" in cmr_keywords at index {idx}, got value \"{str(item)}\" of types: \"{type(search_key)}, {type(search_value)}\"") + raise ValueError( + f'Expected tuple pair of types: ' + f'"{type(str)}, {type(str)}" in cmr_keywords at index {idx}, ' + f'got value "{str(item)}" ' + f'of types: "{type(search_key)}, {type(search_value)}"' + ) return value + # Parse and validate an iterable of strings: "foo,bar,baz" def parse_string_list(value: Sequence[str]) -> List[str]: return parse_list(value, str) @@ -147,19 +186,22 @@ def parse_number_or_range(value: Union[List, Tuple[number, number], range], h): if isinstance(value, range): if value.step == 1: return [value.start, value.stop] - + return h(value) - + except ValueError as exc: - raise ValueError(f'Invalid {h.__name__} or range: {exc}') from exc - -# Parse and validate an iterable of numbers or number ranges, using h() to validate each value: "1,2,3-5", "1.1,1.4,5.1-6.7" + raise ValueError(f"Invalid {h.__name__} or range: {exc}") from exc + + +# Parse and validate an iterable of numbers or number ranges, using h() to validate each value: +# "1,2,3-5", "1.1,1.4,5.1-6.7" def parse_number_or_range_list(value: Sequence, h) -> List: if not isinstance(value, Sequence) or isinstance(value, range): value = [value] return [parse_number_or_range(x, h) for x in value] + # Parse and validate an iterable of integers or integer ranges: "1,2,3-5" def parse_int_or_range_list(value: Sequence) -> List: return parse_number_or_range_list(value, int) @@ -173,14 +215,18 @@ def parse_float_or_range_list(value: Sequence) -> List: # Parse and validate a coordinate list def parse_coord_list(value: Sequence[float]) -> List[float]: if not isinstance(value, Sequence): - raise ValueError(f'Invalid coord list list: Must pass in an iterable. Got {type(value)}.') + raise ValueError( + f"Invalid coord list list: Must pass in an iterable. Got {type(value)}." 
+ ) for coord in value: try: float(coord) except ValueError as exc: - raise ValueError(f'Invalid coordinate: {coord}') from exc + raise ValueError(f"Invalid coordinate: {coord}") from exc if len(value) % 2 != 0: - raise ValueError(f'Invalid coordinate list, odd number of values provided: {value}') + raise ValueError( + f"Invalid coordinate list, odd number of values provided: {value}" + ) return value @@ -190,9 +236,9 @@ def parse_bbox_list(value: Sequence[float]) -> List[float]: # This also makes sure v is an iterable: value = parse_coord_list(value) except ValueError as exc: - raise ValueError(f'Invalid bbox: {exc}') from exc + raise ValueError(f"Invalid bbox: {exc}") from exc if len(value) != 4: - raise ValueError(f'Invalid bbox, must be 4 values: {value}') + raise ValueError(f"Invalid bbox, must be 4 values: {value}") return value @@ -202,9 +248,9 @@ def parse_point_list(value: Sequence[float]) -> List[float]: # This also makes sure v is an iterable: value = parse_coord_list(value) except ValueError as exc: - raise ValueError(f'Invalid point: {exc}') from exc + raise ValueError(f"Invalid point: {exc}") from exc if len(value) != 2: - raise ValueError(f'Invalid point, must be 2 values: {value}') + raise ValueError(f"Invalid point, must be 2 values: {value}") return value @@ -213,12 +259,16 @@ def parse_wkt(value: str) -> str: try: value = wkt.loads(value) except errors.WKTReadingError as exc: - raise ValueError(f'Invalid wkt: {exc}') from exc + raise ValueError(f"Invalid wkt: {exc}") from exc return wkt.dumps(value) + # Take "requests.Session", or anything that subclasses it: def parse_session(session: Type[requests.Session]): if issubclass(type(session), requests.Session): return session else: - raise ValueError(f'Invalid Session: expected ASFSession or a requests.Session subclass. Got {type(session)}') + raise ValueError( + 'Invalid Session: expected ASFSession or a requests.Session subclass. ' + f'Got {type(session)}' + ) diff --git a/asf_search/ASFSearchResults.py b/asf_search/ASFSearchResults.py index 77ef7f94..c3b2fa01 100644 --- a/asf_search/ASFSearchResults.py +++ b/asf_search/ASFSearchResults.py @@ -12,6 +12,7 @@ from asf_search.export.kml import results_to_kml from asf_search.export.metalink import results_to_metalink + class ASFSearchResults(UserList): def __init__(self, *args, opts: ASFSearchOptions = None): super().__init__(*args) @@ -22,8 +23,8 @@ def __init__(self, *args, opts: ASFSearchOptions = None): def geojson(self): return { - 'type': 'FeatureCollection', - 'features': [product.geojson() for product in self] + "type": "FeatureCollection", + "features": [product.geojson() for product in self], } def csv(self): @@ -31,7 +32,7 @@ def csv(self): def kml(self): return results_to_kml(self) - + def metalink(self): return results_to_metalink(self) @@ -39,26 +40,31 @@ def jsonlite(self): return results_to_jsonlite(self) def jsonlite2(self): - return results_to_jsonlite2(self) + return results_to_jsonlite2(self) def __str__(self): return json.dumps(self.geojson(), indent=2, sort_keys=True) def download( - self, - path: str, - session: ASFSession = None, - processes: int = 1, - fileType = FileDownloadType.DEFAULT_FILE + self, + path: str, + session: ASFSession = None, + processes: int = 1, + fileType=FileDownloadType.DEFAULT_FILE, ) -> None: """ Iterates over each ASFProduct and downloads them to the specified path. - :param path: The directory into which the products should be downloaded. - :param session: The session to use. 
Defaults to the session used to fetch the results, or a new one if none was used. - :param processes: Number of download processes to use. Defaults to 1 (i.e. sequential download) + Parameters + ---------- + path: + The directory into which the products should be downloaded. + session: + The session to use + Defaults to the session used to fetch the results, or a new one if none was used. + processes: + Number of download processes to use. Defaults to 1 (i.e. sequential download) - :return: None """ ASF_LOGGER.info(f"Started downloading ASFSearchResults of size {len(self)}.") if processes == 1: @@ -72,10 +78,14 @@ def download( pool.close() pool.join() ASF_LOGGER.info(f"Finished downloading ASFSearchResults of size {len(self)}.") - + def raise_if_incomplete(self) -> None: if not self.searchComplete: - msg = "Results are incomplete due to a search error. See logging for more details. (ASFSearchResults.raise_if_incomplete called)" + msg = ( + 'Results are incomplete due to a search error. ' + 'See logging for more details. (ASFSearchResults.raise_if_incomplete called)' + ) + ASF_LOGGER.error(msg) raise ASFSearchError(msg) @@ -91,12 +101,12 @@ def get_products_by_subclass_type(self) -> dict: if subclasses.get(product_type) is None: subclasses[product_type] = ASFSearchResults([]) - + subclasses[product_type].append(product) - + return subclasses - + + def _download_product(args) -> None: product, path, session, fileType = args product.download(path=path, session=session, fileType=fileType) - diff --git a/asf_search/ASFSession.py b/asf_search/ASFSession.py index 33762739..b9444374 100644 --- a/asf_search/ASFSession.py +++ b/asf_search/ASFSession.py @@ -1,5 +1,5 @@ import platform -from typing import Dict, List, Union +from typing import List, Union import requests from requests.utils import get_netrc_auth import http.cookiejar @@ -8,77 +8,130 @@ from asf_search.exceptions import ASFAuthenticationError from warnings import warn + class ASFSession(requests.Session): - def __init__(self, - edl_host: str = None, - edl_client_id: str = None, - asf_auth_host: str = None, - cmr_host: str = None, - cmr_collections: str = None, - auth_domains: List[str] = None, - auth_cookie_names: List[str] = None - ): + def __init__( + self, + edl_host: str = None, + edl_client_id: str = None, + asf_auth_host: str = None, + cmr_host: str = None, + cmr_collections: str = None, + auth_domains: List[str] = None, + auth_cookie_names: List[str] = None, + ): """ - ASFSession is a subclass of `requests.Session`, and is meant to ease downloading ASF hosted data by simplifying logging in to Earthdata Login. + ASFSession is a subclass of `requests.Session`, and is meant to ease + downloading ASF hosted data by simplifying logging in to Earthdata Login. + To create an EDL account, see here: https://urs.earthdata.nasa.gov/users/new - + ASFSession provides three built-in methods for authorizing downloads: - EDL Username and Password: `auth_with_creds()` - EDL Token: `auth_with_token()` - Authenticated cookiejars: `auth_with_cookiejar()` - `edl_host`: the Earthdata login endpoint used by auth_with_creds(). Defaults to `asf_search.constants.INTERNAL.EDL_HOST` - `edl_client_id`: The Earthdata Login client ID for this package. Defaults to `asf_search.constants.INTERNAL.EDL_CLIENT_ID` - `asf_auth_host`: the ASF auth endpoint . Defaults to `asf_search.constants.INTERNAL.ASF_AUTH_HOST` - `cmr_host (DEPRECATED V7.0.9)`: the base CMR endpoint to test EDL login tokens against. 
Defaults to `asf_search.constants.INTERNAL.CMR_HOST` - `cmr_collections`: the CMR endpoint path login tokens will be tested against. Defaults to `asf_search.constants.INTERNAL.CMR_COLLECTIONS` - `auth_domains`: the list of authorized endpoints that are allowed to pass auth credentials. Defaults to `asf_search.constants.INTERNAL.AUTH_DOMAINS`. Authorization headers WILL NOT be stripped from the session object when redirected through these domains. - `auth_cookie_names`: the list of cookie names to use when verifying with `auth_with_creds()` & `auth_with_cookiejar()` + Parameters + ---------- + `edl_host`: + the Earthdata login endpoint used by auth_with_creds(). + Defaults to `asf_search.constants.INTERNAL.EDL_HOST` + `edl_client_id`: + The Earthdata Login client ID for this package. + Defaults to `asf_search.constants.INTERNAL.EDL_CLIENT_ID` + `asf_auth_host`: + the ASF auth endpoint. + Defaults to `asf_search.constants.INTERNAL.ASF_AUTH_HOST` + `cmr_host (DEPRECATED V7.0.9)`: + the base CMR endpoint to test EDL login tokens against. + Defaults to `asf_search.constants.INTERNAL.CMR_HOST` + `cmr_collections`: + the CMR endpoint path login tokens will be tested against. + Defaults to `asf_search.constants.INTERNAL.CMR_COLLECTIONS` + `auth_domains`: + the list of authorized endpoints that are allowed to pass auth credentials. + Defaults to `asf_search.constants.INTERNAL.AUTH_DOMAINS`. + Authorization headers WILL NOT be stripped from the session object + when redirected through these domains. + `auth_cookie_names`: + the list of cookie names to use when verifying + with `auth_with_creds()` & `auth_with_cookiejar()` + More information on Earthdata Login can be found here: https://urs.earthdata.nasa.gov/documentation/faq """ super().__init__() - user_agent = '; '.join([ - f'Python/{platform.python_version()}', - f'{requests.__name__}/{requests.__version__}', - f'{asf_name}/{asf_version}']) + user_agent = "; ".join( + [ + f"Python/{platform.python_version()}", + f"{requests.__name__}/{requests.__version__}", + f"{asf_name}/{asf_version}", + ] + ) - self.headers.update({'User-Agent': user_agent}) # For all hosts - self.headers.update({'Client-Id': f"{asf_name}_v{asf_version}"}) # For CMR + self.headers.update({"User-Agent": user_agent}) # For all hosts + self.headers.update({"Client-Id": f"{asf_name}_v{asf_version}"}) # For CMR from asf_search.constants import INTERNAL self.edl_host = INTERNAL.EDL_HOST if edl_host is None else edl_host - self.edl_client_id = INTERNAL.EDL_CLIENT_ID if edl_client_id is None else edl_client_id - self.asf_auth_host = INTERNAL.ASF_AUTH_HOST if asf_auth_host is None else asf_auth_host - self.cmr_collections = INTERNAL.CMR_COLLECTIONS if cmr_collections is None else cmr_collections - self.auth_domains = INTERNAL.AUTH_DOMAINS if auth_domains is None else auth_domains - self.auth_cookie_names = INTERNAL.AUTH_COOKIES if auth_cookie_names is None else auth_cookie_names + self.edl_client_id = ( + INTERNAL.EDL_CLIENT_ID if edl_client_id is None else edl_client_id + ) + self.asf_auth_host = ( + INTERNAL.ASF_AUTH_HOST if asf_auth_host is None else asf_auth_host + ) + self.cmr_collections = ( + INTERNAL.CMR_COLLECTIONS if cmr_collections is None else cmr_collections + ) + self.auth_domains = ( + INTERNAL.AUTH_DOMAINS if auth_domains is None else auth_domains + ) + self.auth_cookie_names = ( + INTERNAL.AUTH_COOKIES if auth_cookie_names is None else auth_cookie_names + ) self.cmr_host = INTERNAL.CMR_HOST - + if cmr_host is not None: - warn(f'Use of `cmr_host` keyword with 
`ASFSession` is deprecated for asf-search versions >= 7.0.9, and will be removed with the next major version. \ - \nTo authenticate an EDL token for a non-prod deployment of CMR, set the `edl_host` keyword instead. \ - \n(ex: session arugments for authenticating against uat: `ASFSession(edl_host="uat.urs.earthdata.nasa.gov")`)', category=DeprecationWarning, stacklevel=2) + warn( + 'Use of `cmr_host` keyword with `ASFSession` is deprecated ' + 'for asf-search versions >= 7.0.9, ' + 'and will be removed with the next major version.' + '\nTo authenticate an EDL token for a non-prod deployment of CMR, ' + 'set the `edl_host` keyword instead. ' + '\n(ex: session arugments for authenticating against uat: ' + '`ASFSession(edl_host="uat.urs.earthdata.nasa.gov")`)', + category=DeprecationWarning, + stacklevel=2, + ) self.cmr_host = cmr_host def __eq__(self, other): - return self.auth == other.auth \ - and self.headers == other.headers \ - and self.cookies == other.cookies + return ( + self.auth == other.auth + and self.headers == other.headers + and self.cookies == other.cookies + ) def auth_with_creds(self, username: str, password: str): """ Authenticates the session using EDL username/password credentials - :param username: EDL username, see https://urs.earthdata.nasa.gov/ - :param password: EDL password, see https://urs.earthdata.nasa.gov/ - :param host (optional): EDL host to log in to - - :return ASFSession: returns self for convenience + Parameters + ---------- + username: + EDL username, see https://urs.earthdata.nasa.gov/ + password: + EDL password, see https://urs.earthdata.nasa.gov/ + host: + (optional): EDL host to log in to + + Returns + ---------- + ASFSession """ - login_url = f'https://{self.edl_host}/oauth/authorize?client_id={self.edl_client_id}&response_type=code&redirect_uri=https://{self.asf_auth_host}/login' + login_url = f"https://{self.edl_host}/oauth/authorize?client_id={self.edl_client_id}&response_type=code&redirect_uri=https://{self.asf_auth_host}/login" # noqa F401 self.auth = (username, password) @@ -88,17 +141,24 @@ def auth_with_creds(self, username: str, password: str): if not self._check_auth_cookies(self.cookies.get_dict()): raise ASFAuthenticationError("Username or password is incorrect") - ASF_LOGGER.info(f'Login successful') + ASF_LOGGER.info('Login successful') - token = self.cookies.get_dict().get('urs-access-token') + token = self.cookies.get_dict().get("urs-access-token") if token is None: - ASF_LOGGER.warning(f'Provided asf_auth_host "{self.asf_auth_host}" returned no EDL token during ASFSession validation. EDL Token expected in "urs-access-token" cookie, required for hidden/restricted dataset access. The current session will use basic authorization.') + ASF_LOGGER.warning( + f'Provided asf_auth_host "{self.asf_auth_host}" returned no EDL token ' + 'during ASFSession validation. EDL Token expected in "urs-access-token" cookie, ' + 'required for hidden/restricted dataset access. ' + 'The current session will use basic authorization.' + ) else: - ASF_LOGGER.info(f'Found "urs-access-token" cookie in response from auth host, using token for downloads and cmr queries.') + ASF_LOGGER.info( + 'Found "urs-access-token" cookie in response from auth host, ' + 'using token for downloads and cmr queries.' 
+ ) self.auth = None self._update_edl_token(token=token) - return self @@ -106,46 +166,56 @@ def auth_with_token(self, token: str): """ Authenticates the session using an EDL Authorization: Bearer token - :param token: EDL Auth Token for authenticated downloads, see https://urs.earthdata.nasa.gov/user_tokens + Parameters + ---------- + token: + EDL Auth Token for authenticated downloads, see https://urs.earthdata.nasa.gov/user_tokens - :return ASFSession: returns self for convenience + Returns + ---------- + ASFSession """ - oauth_authorization = f"https://{self.edl_host}/oauth/tokens/user?client_id={self.edl_client_id}" - + oauth_authorization = ( + f"https://{self.edl_host}/oauth/tokens/user?client_id={self.edl_client_id}" + ) + ASF_LOGGER.info(f"Authenticating EDL token against {oauth_authorization}") - response = self.post(url=oauth_authorization, data={ - 'token': token - }) + response = self.post(url=oauth_authorization, data={"token": token}) if not 200 <= response.status_code <= 299: if not self._try_legacy_token_auth(token=token): raise ASFAuthenticationError("Invalid/Expired token passed") - ASF_LOGGER.info(f"EDL token authentication successful") + ASF_LOGGER.info('EDL token authentication successful') self._update_edl_token(token=token) return self def _try_legacy_token_auth(self, token: str) -> False: """ - Checks `cmr_host` search endpoint directly with provided token using method used in previous versions of asf-search (<7.0.9). + Checks `cmr_host` search endpoint directly with provided token + using method used in previous versions of asf-search (<7.0.9). + This is to prevent breaking changes until next major release """ from asf_search.constants import INTERNAL if self.cmr_host != INTERNAL.CMR_HOST: - self.headers.update({'Authorization': 'Bearer {0}'.format(token)}) + self.headers.update({"Authorization": "Bearer {0}".format(token)}) legacy_auth_url = f"https://{self.cmr_host}{self.cmr_collections}" response = self.get(legacy_auth_url) - self.headers.pop('Authorization') + self.headers.pop("Authorization") return 200 <= response.status_code <= 299 - + return False - + def _update_edl_token(self, token: str): - self.headers.update({'Authorization': 'Bearer {0}'.format(token)}) - - def auth_with_cookiejar(self, cookies: Union[http.cookiejar.CookieJar, requests.cookies.RequestsCookieJar]): + self.headers.update({"Authorization": "Bearer {0}".format(token)}) + + def auth_with_cookiejar( + self, + cookies: Union[http.cookiejar.CookieJar, requests.cookies.RequestsCookieJar], + ): """ Authenticates the session using a pre-existing cookiejar @@ -160,63 +230,81 @@ def auth_with_cookiejar(self, cookies: Union[http.cookiejar.CookieJar, requests. if cookie.is_expired(): raise ASFAuthenticationError("Cookiejar contains expired cookies") - token = cookies.get_dict().get('urs-access-token') + token = cookies.get_dict().get("urs-access-token") if token is None: - ASF_LOGGER.warning(f'Failed to find EDL Token in cookiejar. EDL Token expected in "urs-access-token" cookie, required for hidden/restricted dataset access.') + ASF_LOGGER.warning( + 'Failed to find EDL Token in cookiejar. ' + 'EDL Token expected in "urs-access-token" cookie, ' + 'required for hidden/restricted dataset access.' 
+            )
         else:
-            ASF_LOGGER.info(f'Authenticating EDL token found in "urs-access-token" cookie')
+            ASF_LOGGER.info(
+                'Authenticating EDL token found in "urs-access-token" cookie'
+            )
             try:
                 self.auth_with_token(token)
             except ASFAuthenticationError:
-                ASF_LOGGER.warning(f'Failed to authenticate with found EDL token found. Access to hidden/restricted cmr data may be limited.')
+                ASF_LOGGER.warning(
+                    'Failed to authenticate with found EDL token found. '
+                    'Access to hidden/restricted cmr data may be limited.'
+                )

         self.cookies = cookies

         return self

-    def _check_auth_cookies(self, cookies: Union[http.cookiejar.CookieJar, requests.cookies.RequestsCookieJar]) -> bool:
+    def _check_auth_cookies(
+        self,
+        cookies: Union[http.cookiejar.CookieJar, requests.cookies.RequestsCookieJar],
+    ) -> bool:
         if isinstance(cookies, requests.cookies.RequestsCookieJar):
             cookies = dict(cookies)

         return any(cookie in self.auth_cookie_names for cookie in cookies)

-    def rebuild_auth(self, prepared_request: requests.Request, response: requests.Response):
+    def rebuild_auth(
+        self, prepared_request: requests.Request, response: requests.Response
+    ):
         """
-        Overrides requests.Session.rebuild_auth() default behavior of stripping the Authorization header
+        Overrides requests.Session.rebuild_auth()
+        default behavior of stripping the Authorization header
         upon redirect. This allows token authentication to work with redirects to trusted domains
         """
         headers = prepared_request.headers
         url = prepared_request.url

-        if 'Authorization' in headers:
-            original_domain = '.'.join(self._get_domain(response.request.url).split('.')[-3:])
-            redirect_domain = '.'.join(self._get_domain(url).split('.')[-3:])
+        if "Authorization" in headers:
+            original_domain = ".".join(
+                self._get_domain(response.request.url).split(".")[-3:]
+            )
+            redirect_domain = ".".join(self._get_domain(url).split(".")[-3:])

-            if (original_domain != redirect_domain
-                and (original_domain not in self.auth_domains
-                or redirect_domain not in self.auth_domains)):
-                del headers['Authorization']
+            if original_domain != redirect_domain and (
+                original_domain not in self.auth_domains
+                or redirect_domain not in self.auth_domains
+            ):
+                del headers["Authorization"]

             new_auth = get_netrc_auth(url) if self.trust_env else None
             if new_auth is not None:
                 prepared_request.prepare_auth(new_auth)

     def _get_domain(self, url: str):
-        return requests.utils.urlparse(url).hostname
+        return requests.utils.urlparse(url).hostname

-    # multi-processing does an implicit copy of ASFSession objects, 
+    # multi-processing does an implicit copy of ASFSession objects,
     # this ensures ASFSession class variables are included
     def __getstate__(self):
         state = super().__getstate__()
         state = {
             **state,
-            'edl_host': self.edl_host,
-            'edl_client_id': self.edl_client_id,
-            'asf_auth_host': self.asf_auth_host,
-            'cmr_host': self.cmr_host,
-            'cmr_collections': self.cmr_collections,
-            'auth_domains': self.auth_domains,
-            'auth_cookie_names': self.auth_cookie_names
+            "edl_host": self.edl_host,
+            "edl_client_id": self.edl_client_id,
+            "asf_auth_host": self.asf_auth_host,
+            "cmr_host": self.cmr_host,
+            "cmr_collections": self.cmr_collections,
+            "auth_domains": self.auth_domains,
+            "auth_cookie_names": self.auth_cookie_names,
         }
         return state
diff --git a/asf_search/ASFStackableProduct.py b/asf_search/ASFStackableProduct.py
index 60c3830e..b99b2545 100644
--- a/asf_search/ASFStackableProduct.py
+++ b/asf_search/ASFStackableProduct.py
@@ -13,57 +13,68 @@ class ASFStackableProduct(ASFProduct):
     ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/
     ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/
     """
-    _base_properties = {
-    }
+
+    _base_properties = {}

     class BaselineCalcType(Enum):
         """
         Defines how asf-search will calculate perpendicular baseline for products of this subclass
         """
+
         PRE_CALCULATED = 0
-        """Has pre-calculated insarBaseline value that will be used for perpendicular calculations"""
+        """Has pre-calculated insarBaseline value that will be used for perpendicular calculations"""  # noqa F401
         CALCULATED = 1
-        """Uses position/velocity state vectors and ascending node time for perpendicular calculations"""
+        """Uses position/velocity state vectors and ascending node time for perpendicular calculations"""  # noqa F401

-
     baseline_type = BaselineCalcType.PRE_CALCULATED
     """Determines how asf-search will attempt to stack products of this type."""
-
+
     def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
         super().__init__(args, session)
         self.baseline = self.get_baseline_calc_properties()

     def get_baseline_calc_properties(self) -> Dict:
-        insarBaseline = self.umm_cast(float, self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'INSAR_BASELINE'), 'Values', 0))
+        insarBaseline = self.umm_cast(
+            float,
+            self.umm_get(
+                self.umm,
+                "AdditionalAttributes",
+                ("Name", "INSAR_BASELINE"),
+                "Values",
+                0,
+            ),
+        )

         if insarBaseline is None:
             return None

-        return {
-            'insarBaseline': insarBaseline
-        }
+        return {"insarBaseline": insarBaseline}

     def get_stack_opts(self, opts: ASFSearchOptions = None):
-        stack_opts = (ASFSearchOptions() if opts is None else copy(opts))
+        stack_opts = ASFSearchOptions() if opts is None else copy(opts)
         stack_opts.processingLevel = self.get_default_baseline_product_type()

-        if self.properties.get('insarStackId') in [None, 'NA', 0, '0']:
-            raise ASFBaselineError(f'Requested reference product needs a baseline stack ID but does not have one: {self.properties["fileID"]}')
+        if self.properties.get("insarStackId") in [None, "NA", 0, "0"]:
+            raise ASFBaselineError(
+                'Requested reference product needs a baseline stack ID '
+                f'but does not have one: {self.properties["fileID"]}'
+            )

-        stack_opts.insarStackId = self.properties['insarStackId']
+        stack_opts.insarStackId = self.properties["insarStackId"]
         return stack_opts

     @staticmethod
     def get_property_paths() -> Dict:
         return {
             **ASFProduct.get_property_paths(),
-            **ASFStackableProduct._base_properties
+            **ASFStackableProduct._base_properties,
         }

     def is_valid_reference(self):
-        # we don't stack at all if any of stack is missing insarBaseline, unlike stacking S1 products(?)
+        # we don't stack at all if any of stack is missing insarBaseline,
+        # unlike stacking S1 products(?)
+ if "insarBaseline" not in self.baseline: + raise ValueError("No baseline values available for precalculated dataset") return True diff --git a/asf_search/CMR/MissionList.py b/asf_search/CMR/MissionList.py index 47d77235..27c7b1f2 100644 --- a/asf_search/CMR/MissionList.py +++ b/asf_search/CMR/MissionList.py @@ -1,4 +1,4 @@ -from typing import Dict, List +from typing import Dict from asf_search.exceptions import CMRError from asf_search.constants.INTERNAL import CMR_HOST, CMR_COLLECTIONS_PATH @@ -6,7 +6,7 @@ def get_campaigns(data) -> Dict: - """Queries CMR Collections endpoint for + """Queries CMR Collections endpoint for collections associated with the given platform :param data: a dictionary with required keys: @@ -14,14 +14,13 @@ def get_campaigns(data) -> Dict: :return: Dictionary containing CMR umm_json response """ - response = requests.post(f'https://{CMR_HOST}{CMR_COLLECTIONS_PATH}', - data=data) + response = requests.post(f"https://{CMR_HOST}{CMR_COLLECTIONS_PATH}", data=data) if response.status_code != 200: - raise CMRError(f'CMR_ERROR {response.status_code}: {response.text}') + raise CMRError(f"CMR_ERROR {response.status_code}: {response.text}") try: data = response.json() except Exception as e: - raise CMRError(f'CMR_ERROR: Error parsing JSON from CMR: {e}') + raise CMRError(f"CMR_ERROR: Error parsing JSON from CMR: {e}") return data diff --git a/asf_search/CMR/__init__.py b/asf_search/CMR/__init__.py index 50690b77..7eac60e0 100644 --- a/asf_search/CMR/__init__.py +++ b/asf_search/CMR/__init__.py @@ -1,5 +1,11 @@ -from .MissionList import get_campaigns -from .subquery import build_subqueries -from .translate import translate_opts -from .field_map import field_map -from .datasets import dataset_collections, collections_per_platform, collections_by_processing_level, get_concept_id_alias, get_dataset_concept_ids +from .MissionList import get_campaigns # noqa: F401 +from .subquery import build_subqueries # noqa: F401 +from .translate import translate_opts # noqa: F401 +from .field_map import field_map # noqa: F401 +from .datasets import ( # noqa: F401 + dataset_collections, # noqa: F401 + collections_per_platform, # noqa: F401 + collections_by_processing_level, # noqa: F401 + get_concept_id_alias, # noqa: F401 + get_dataset_concept_ids, # noqa: F401 +) diff --git a/asf_search/CMR/datasets.py b/asf_search/CMR/datasets.py index 6fc80eff..8c825ac6 100644 --- a/asf_search/CMR/datasets.py +++ b/asf_search/CMR/datasets.py @@ -6,202 +6,202 @@ "NISAR_NEN_RRST_BETA_V1": [ "C1261815181-ASFDEV", "C1261815288-ASF", - "C2850220296-ASF" + "C2850220296-ASF", ], "NISAR_NEN_RRST_PROVISIONAL_V1": [ "C1261832381-ASFDEV", "C1261832657-ASF", - "C2853068083-ASF" + "C2853068083-ASF", ], "NISAR_NEN_RRST_V1": [ "C1256533420-ASFDEV", "C1257349121-ASF", - "C2727902012-ASF" + "C2727902012-ASF", ], "NISAR_L0A_RRST_BETA_V1": [ "C1261813453-ASFDEV", "C1261815147-ASF", - "C2850223384-ASF" + "C2850223384-ASF", ], "NISAR_L0A_RRST_PROVISIONAL_V1": [ "C1261832466-ASFDEV", "C1261832658-ASF", - "C2853086824-ASF" + "C2853086824-ASF", ], "NISAR_L0A_RRST_V1": [ "C1256524081-ASFDEV", "C1257349120-ASF", - "C2727901263-ASF" + "C2727901263-ASF", ], "NISAR_L0B_RRSD_BETA_V1": [ "C1261815274-ASFDEV", "C1261815289-ASF", - "C2850224301-ASF" + "C2850224301-ASF", ], "NISAR_L0B_RRSD_PROVISIONAL_V1": [ "C1261832497-ASFDEV", "C1261832659-ASF", - "C2853089814-ASF" + "C2853089814-ASF", ], "NISAR_L0B_RRSD_V1": [ "C1256358262-ASFDEV", "C1257349115-ASF", - "C2727901639-ASF" + "C2727901639-ASF", ], "NISAR_L0B_CRSD_BETA_V1": [ 
"C1261815276-ASFDEV", "C1261815301-ASF", - "C2850225137-ASF" + "C2850225137-ASF", ], "NISAR_L0B_CRSD_PROVISIONAL_V1": [ "C1261832632-ASFDEV", "C1261832671-ASF", - "C2853091612-ASF" + "C2853091612-ASF", ], "NISAR_L0B_CRSD_V1": [ "C1256358463-ASFDEV", "C1257349114-ASF", - "C2727901523-ASF" + "C2727901523-ASF", ], "NISAR_L1_RSLC_BETA_V1": [ "C1261813489-ASFDEV", "C1261815148-ASF", - "C2850225585-ASF" + "C2850225585-ASF", ], "NISAR_L1_RSLC_PROVISIONAL_V1": [ "C1261832868-ASFDEV", "C1261833052-ASF", - "C2853145197-ASF" + "C2853145197-ASF", ], "NISAR_L1_RSLC_V1": [ "C1256363301-ASFDEV", "C1257349109-ASF", - "C2727900439-ASF" + "C2727900439-ASF", ], "NISAR_L1_RIFG_BETA_V1": [ "C1261819086-ASFDEV", "C1261819120-ASF", - "C2850234202-ASF" + "C2850234202-ASF", ], "NISAR_L1_RIFG_PROVISIONAL_V1": [ "C1261832940-ASFDEV", "C1261833063-ASF", - "C2853147928-ASF" + "C2853147928-ASF", ], "NISAR_L1_RIFG_V1": [ "C1256381769-ASFDEV", "C1257349108-ASF", - "C2723110181-ASF" + "C2723110181-ASF", ], "NISAR_L1_RUNW_BETA_V1": [ "C1261819098-ASFDEV", "C1261819121-ASF", - "C2850235455-ASF" + "C2850235455-ASF", ], "NISAR_L1_RUNW_PROVISIONAL_V1": [ "C1261832990-ASFDEV", "C1261833064-ASF", - "C2853153429-ASF" + "C2853153429-ASF", ], "NISAR_L1_RUNW_V1": [ "C1256420738-ASFDEV", "C1257349107-ASF", - "C2727900827-ASF" + "C2727900827-ASF", ], "NISAR_L1_ROFF_BETA_V1": [ "C1261819110-ASFDEV", "C1261819145-ASF", - "C2850237619-ASF" + "C2850237619-ASF", ], "NISAR_L1_ROFF_PROVISIONAL_V1": [ "C1261832993-ASFDEV", "C1261833076-ASF", - "C2853156054-ASF" + "C2853156054-ASF", ], "NISAR_L1_ROFF_V1": [ "C1256411631-ASFDEV", "C1257349103-ASF", - "C2727900080-ASF" + "C2727900080-ASF", ], "NISAR_L2_GSLC_BETA_V1": [ "C1261819167-ASFDEV", "C1261819258-ASF", - "C2850259510-ASF" + "C2850259510-ASF", ], "NISAR_L2_GSLC_PROVISIONAL_V1": [ "C1261833024-ASFDEV", "C1261833127-ASF", - "C2854332392-ASF" + "C2854332392-ASF", ], "NISAR_L2_GSLC_V1": [ "C1256413628-ASFDEV", "C1257349102-ASF", - "C2727896667-ASF" + "C2727896667-ASF", ], "NISAR_L2_GUNW_BETA_V1": [ "C1261819168-ASFDEV", "C1261819270-ASF", - "C2850261892-ASF" + "C2850261892-ASF", ], "NISAR_L2_GUNW_PROVISIONAL_V1": [ "C1261833025-ASFDEV", "C1261846741-ASF", - "C2854335566-ASF" + "C2854335566-ASF", ], "NISAR_L2_GUNW_V1": [ "C1256432264-ASFDEV", "C1257349096-ASF", - "C2727897718-ASF" + "C2727897718-ASF", ], "NISAR_L2_GCOV_BETA_V1": [ "C1261819211-ASFDEV", "C1261819275-ASF", - "C2850262927-ASF" + "C2850262927-ASF", ], "NISAR_L2_GCOV_PROVISIONAL_V1": [ "C1261833026-ASFDEV", "C1261846880-ASF", - "C2854338529-ASF" + "C2854338529-ASF", ], "NISAR_L2_GCOV_V1": [ "C1256477304-ASFDEV", "C1257349095-ASF", - "C2727896018-ASF" + "C2727896018-ASF", ], "NISAR_L2_GOFF_BETA_V1": [ "C1261819233-ASFDEV", "C1261819281-ASF", - "C2850263910-ASF" + "C2850263910-ASF", ], "NISAR_L2_GOFF_PROVISIONAL_V1": [ "C1261833027-ASFDEV", "C1261846994-ASF", - "C2854341702-ASF" + "C2854341702-ASF", ], "NISAR_L2_GOFF_V1": [ "C1256479237-ASFDEV", "C1257349094-ASF", - "C2727896460-ASF" + "C2727896460-ASF", ], "NISAR_L3_SME2_BETA_V1": [ "C1261819245-ASFDEV", "C1261819282-ASF", - "C2850265000-ASF" + "C2850265000-ASF", ], "NISAR_L3_SME2_PROVISIONAL_V1": [ "C1261833050-ASFDEV", "C1261847095-ASF", - "C2854344945-ASF" + "C2854344945-ASF", ], "NISAR_L3_SME2_V1": [ "C1256568692-ASFDEV", "C1257349093-ASF", - "C2727894546-ASF" + "C2727894546-ASF", ], "NISAR_CUSTOM_PROVISIONAL_V1": [ "C1262134528-ASFDEV", "C1262135006-ASF", - "C2874824964-ASF" + "C2874824964-ASF", ], }, "SENTINEL-1": { @@ -433,7 +433,7 @@ "C1595765183-ASF", "C1225776659-ASF", ], - 
"ARIA_S1_GUNW": ["C2859376221-ASF", "C1261881077-ASF"] + "ARIA_S1_GUNW": ["C2859376221-ASF", "C1261881077-ASF"], }, "SMAP": { "SPL1A_RO_METADATA_003": ["C1243122884-ASF", "C1233103964-ASF"], @@ -940,7 +940,7 @@ "C1210599503-ASF", "C1210599673-ASF", ], - "NISAR": [ + "NISAR": [ # UAT ASFDEV "C1261815181-ASFDEV", "C1261832381-ASFDEV", @@ -1063,7 +1063,7 @@ "C2850265000-ASF", "C2854344945-ASF", "C2727894546-ASF", - "C2874824964-ASF" + "C2874824964-ASF", ], } @@ -1400,7 +1400,7 @@ "STOKES": ["C1214419355-ASF", "C1210599673-ASF"], } -#################### Helper Methods #################### +# Helper Methods def get_concept_id_alias(param_list: List[str], collections_dict: dict) -> List[str]: @@ -1408,8 +1408,10 @@ def get_concept_id_alias(param_list: List[str], collections_dict: dict) -> List[ param: param_list (List[str]): list of search values to alias param: collections_dict (dict): The search value to concept-id dictionary to read from - returns List[str]: Returns a list of concept-ids that correspond to the given list of search values - If any of the search values are not keys in the collections_dict, this will instead returns an empty list. + returns List[str]: Returns a list of concept-ids + that correspond to the given list of search values + If any of the search values are not keys in the collections_dict, + this will instead returns an empty list. """ concept_id_aliases = [] for param in param_list: diff --git a/asf_search/CMR/field_map.py b/asf_search/CMR/field_map.py index 0754b4fd..810cf15c 100644 --- a/asf_search/CMR/field_map.py +++ b/asf_search/CMR/field_map.py @@ -10,9 +10,9 @@ 'campaign': {'key': 'attribute[]', 'fmt': 'string,MISSION_NAME,{0}'}, 'maxDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,,{0}'}, 'minDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,{0},'}, - 'maxFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,,{0}'}, - 'minFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,{0},'}, - 'flightDirection': {'key': 'attribute[]', 'fmt': 'string,ASCENDING_DESCENDING,{0}'}, + 'maxFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,,{0}'}, # noqa F401 + 'minFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,{0},'}, # noqa F401 + 'flightDirection': {'key': 'attribute[]', 'fmt': 'string,ASCENDING_DESCENDING,{0}'}, # noqa F401 'flightLine': {'key': 'attribute[]', 'fmt': 'string,FLIGHT_LINE,{0}'}, 'frame': {'key': 'attribute[]', 'fmt': 'int,CENTER_ESA_FRAME,{0}'}, 'granule_list': {'key': 'readable_granule_name[]', 'fmt': '{0}'}, @@ -36,8 +36,8 @@ 'temporal': {'key': 'temporal', 'fmt': '{0}'}, 'collections': {'key': 'echo_collection_id[]', 'fmt': '{0}'}, 'shortName': {'key': 'shortName', 'fmt': '{0}'}, - 'temporalBaselineDays': {'key': 'attribute[]', 'fmt': 'int,TEMPORAL_BASELINE_DAYS,{0}'}, - + 'temporalBaselineDays': {'key': 'attribute[]', 'fmt': 'int,TEMPORAL_BASELINE_DAYS,{0}'}, # noqa F401 + # SLC BURST fields 'absoluteBurstID': {'key': 'attribute[]', 'fmt': 'int,BURST_ID_ABSOLUTE,{0}'}, 'relativeBurstID': {'key': 'attribute[]', 'fmt': 'int,BURST_ID_RELATIVE,{0}'}, @@ -45,4 +45,4 @@ # OPERA-S1 field 'operaBurstID': {'key': 'attribute[]', 'fmt': 'string,OPERA_BURST_ID,{0}'}, -} \ No newline at end of file +} diff --git a/asf_search/CMR/subquery.py b/asf_search/CMR/subquery.py index dfff6133..8d1ab7a3 100644 --- a/asf_search/CMR/subquery.py +++ b/asf_search/CMR/subquery.py @@ -1,16 +1,23 @@ -from typing import List, Optional, Tuple +from typing import List, Tuple import itertools 
from copy import copy from asf_search.ASFSearchOptions import ASFSearchOptions from asf_search.constants import CMR_PAGE_SIZE -from asf_search.CMR.datasets import collections_by_processing_level, collections_per_platform, dataset_collections, get_concept_id_alias, get_dataset_concept_ids +from asf_search.CMR.datasets import ( + collections_by_processing_level, + collections_per_platform, + get_concept_id_alias, + get_dataset_concept_ids, +) from numpy import intersect1d, union1d + def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]: """ - Build a list of sub-queries using the cartesian product of all the list parameters described by opts + Build a list of sub-queries using the cartesian product + of all the list parameters described by opts :param opts: The search options to split into sub-queries :return list: A list of ASFSearchOptions objects @@ -18,30 +25,44 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]: params = dict(opts) # Break out two big list offenders into manageable chunks - for chunked_key in ['granule_list', 'product_list']: + for chunked_key in ["granule_list", "product_list"]: if params.get(chunked_key) is not None: params[chunked_key] = chunk_list(params[chunked_key], CMR_PAGE_SIZE) - list_param_names = ['platform', 'season', 'collections', 'dataset', 'cmr_keywords', 'shortName'] # these parameters will dodge the subquery system - skip_param_names = ['maxResults']# these params exist in opts, but shouldn't be passed on to subqueries at ALL - - collections, aliased_keywords = get_keyword_concept_ids(params, opts.collectionAlias) - params['collections'] = list(union1d(collections, params.get('collections', []))) - + list_param_names = [ + "platform", + "season", + "collections", + "dataset", + "cmr_keywords", + "shortName", + ] # these parameters will dodge the subquery system + skip_param_names = [ + "maxResults", + ] # these params exist in opts, but shouldn't be passed on to subqueries at ALL + + collections, aliased_keywords = get_keyword_concept_ids( + params, opts.collectionAlias + ) + params["collections"] = list(union1d(collections, params.get("collections", []))) + for keyword in [*skip_param_names, *aliased_keywords]: params.pop(keyword, None) - + subquery_params, list_params = {}, {} for key, value in params.items(): if key in list_param_names: list_params[key] = value else: subquery_params[key] = value - + sub_queries = cartesian_product(subquery_params) return [_build_subquery(query, opts, list_params) for query in sub_queries] -def _build_subquery(query: List[Tuple[dict]], opts: ASFSearchOptions, list_params: dict) -> ASFSearchOptions: + +def _build_subquery( + query: List[Tuple[dict]], opts: ASFSearchOptions, list_params: dict +) -> ASFSearchOptions: """ Composes query dict and list params into new ASFSearchOptions object @@ -52,24 +73,24 @@ def _build_subquery(query: List[Tuple[dict]], opts: ASFSearchOptions, list_param q = dict() for p in query: q.update(p) - - q['provider'] = opts.provider - q['host'] = opts.host - q['session'] = copy(opts.session) - - return ASFSearchOptions( - **q, - **list_params - ) -def get_keyword_concept_ids(params: dict, use_collection_alias: bool=True) -> dict: + q["provider"] = opts.provider + q["host"] = opts.host + q["session"] = copy(opts.session) + + return ASFSearchOptions(**q, **list_params) + + +def get_keyword_concept_ids(params: dict, use_collection_alias: bool = True) -> dict: """ Gets concept-ids for dataset, platform, processingLevel keywords processingLevel is scoped by 
dataset or platform concept-ids when available - : param params: search parameter dictionary pre-CMR translation - : param use_collection_alias: whether or not to alias platform and processingLevel with concept-ids - : returns two lists: + : param params: + search parameter dictionary pre-CMR translation + : param use_collection_alias: + whether or not to alias platform and processingLevel with concept-ids + : returns two lists: - list of concept-ids for dataset, platform, and processingLevel - list of aliased keywords to remove from final parameters """ @@ -77,37 +98,43 @@ def get_keyword_concept_ids(params: dict, use_collection_alias: bool=True) -> di aliased_keywords = [] if use_collection_alias: - if 'processingLevel' in params.keys(): - collections = get_concept_id_alias(params.get('processingLevel'), collections_by_processing_level) + if "processingLevel" in params.keys(): + collections = get_concept_id_alias( + params.get("processingLevel"), collections_by_processing_level + ) if len(collections): - aliased_keywords.append('processingLevel') + aliased_keywords.append("processingLevel") - if 'platform' in params.keys(): + if "platform" in params.keys(): platform_concept_ids = get_concept_id_alias( - [platform.upper() for platform in params.get('platform')], - collections_per_platform - ) + [platform.upper() for platform in params.get("platform")], + collections_per_platform, + ) if len(platform_concept_ids): - aliased_keywords.append('platform') + aliased_keywords.append("platform") collections = _get_intersection(platform_concept_ids, collections) - if 'dataset' in params.keys(): - aliased_keywords.append('dataset') - dataset_concept_ids = get_dataset_concept_ids(params.get('dataset')) + if "dataset" in params.keys(): + aliased_keywords.append("dataset") + dataset_concept_ids = get_dataset_concept_ids(params.get("dataset")) collections = _get_intersection(dataset_concept_ids, collections) - + return collections, aliased_keywords -def _get_intersection(keyword_concept_ids: List[str], intersecting_ids: List[str]) -> List[str]: + +def _get_intersection( + keyword_concept_ids: List[str], intersecting_ids: List[str] +) -> List[str]: """ Returns the intersection between two lists. 
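    For example (with illustrative concept-ids):
    _get_intersection(['C123-ASF', 'C456-ASF'], ['C456-ASF', 'C789-ASF'])
    returns ['C456-ASF'].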
If the second list is empty the first list is return unchaged """ if len(intersecting_ids): return list(intersect1d(intersecting_ids, keyword_concept_ids)) - + return keyword_concept_ids - + + def chunk_list(source: List, n: int) -> List: """ Breaks a longer list into a list of lists, each of length n @@ -117,7 +144,7 @@ def chunk_list(source: List, n: int) -> List: :return List[List, ...]: """ - return [source[i * n:(i + 1) * n] for i in range((len(source) + n - 1) // n)] + return [source[i * n: (i + 1) * n] for i in range((len(source) + n - 1) // n)] def cartesian_product(params): @@ -146,10 +173,8 @@ def translate_param(param_name, param_val) -> List[dict]: formatted_val = unformatted_val if isinstance(unformatted_val, list): - formatted_val = ','.join([f'{t}' for t in unformatted_val]) + formatted_val = ",".join([f"{t}" for t in unformatted_val]) - param_list.append({ - param_name: formatted_val - }) + param_list.append({param_name: formatted_val}) return param_list diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py index 7f6973f5..63508366 100644 --- a/asf_search/CMR/translate.py +++ b/asf_search/CMR/translate.py @@ -26,7 +26,7 @@ def translate_opts(opts: ASFSearchOptions) -> List: # Special case to unravel WKT field a little for compatibility if "intersectsWith" in dict_opts: - shape = wkt.loads(dict_opts.pop('intersectsWith', None)) + shape = wkt.loads(dict_opts.pop("intersectsWith", None)) # If a wide rectangle is provided, make sure to use the bounding box # instead of the wkt for better responses from CMR @@ -34,75 +34,85 @@ def translate_opts(opts: ASFSearchOptions) -> List: if should_use_bbox(shape): bounds = shape.boundary.bounds if bounds[0] > 180 or bounds[2] > 180: - bounds = [(x + 180) % 360 - 180 if idx % 2 == 0 and abs(x) > 180 else x for idx, x in enumerate(bounds)] + bounds = [ + (x + 180) % 360 - 180 if idx % 2 == 0 and abs(x) > 180 else x + for idx, x in enumerate(bounds) + ] bottom_left = [str(coord) for coord in bounds[:2]] top_right = [str(coord) for coord in bounds[2:]] - bbox = ','.join([*bottom_left, *top_right]) - dict_opts['bbox'] = bbox + bbox = ",".join([*bottom_left, *top_right]) + dict_opts["bbox"] = bbox else: - (shapeType, shape) = wkt_to_cmr_shape(shape).split(':') + (shapeType, shape) = wkt_to_cmr_shape(shape).split(":") dict_opts[shapeType] = shape # If you need to use the temporal key: - if any(key in dict_opts for key in ['start', 'end', 'season']): + if any(key in dict_opts for key in ["start", "end", "season"]): dict_opts = fix_date(dict_opts) - + # convert the above parameters to a list of key/value tuples cmr_opts = [] # user provided umm fields - custom_cmr_keywords = dict_opts.pop('cmr_keywords', []) + custom_cmr_keywords = dict_opts.pop("cmr_keywords", []) - for (key, val) in dict_opts.items(): + for key, val in dict_opts.items(): # If it's "session" or something else CMR doesn't accept, don't send it: if key not in field_map: continue if isinstance(val, list): for x in val: - if key in ['granule_list', 'product_list']: - for y in x.split(','): + if key in ["granule_list", "product_list"]: + for y in x.split(","): cmr_opts.append((key, y)) else: if isinstance(x, tuple): - cmr_opts.append((key, ','.join([str(t) for t in x]))) + cmr_opts.append((key, ",".join([str(t) for t in x]))) else: cmr_opts.append((key, x)) else: cmr_opts.append((key, val)) # translate the above tuples to CMR key/values for i, opt in enumerate(cmr_opts): - cmr_opts[i] = field_map[opt[0]]['key'], field_map[opt[0]]['fmt'].format(opt[1]) + cmr_opts[i] = 
field_map[opt[0]]["key"], field_map[opt[0]]["fmt"].format(opt[1]) if should_use_asf_frame(cmr_opts): - cmr_opts = use_asf_frame(cmr_opts) + cmr_opts = use_asf_frame(cmr_opts) cmr_opts.extend(custom_cmr_keywords) additional_keys = [ - ('page_size', CMR_PAGE_SIZE), - ('options[temporal][and]', 'true'), - ('sort_key[]', '-end_date'), - ('sort_key[]', 'granule_ur'), - ('options[platform][ignore_case]', 'true'), - ('provider', opts.provider), + ("page_size", CMR_PAGE_SIZE), + ("options[temporal][and]", "true"), + ("sort_key[]", "-end_date"), + ("sort_key[]", "granule_ur"), + ("options[platform][ignore_case]", "true"), + ("provider", opts.provider), ] - + cmr_opts.extend(additional_keys) return cmr_opts -def should_use_asf_frame(cmr_opts): - asf_frame_platforms = ['SENTINEL-1A', 'SENTINEL-1B', 'ALOS'] - - asf_frame_collections = get_concept_id_alias(asf_frame_platforms, collections_per_platform) - return any([ - p[0] == 'platform[]' and p[1].upper() in asf_frame_platforms - or p[0] == 'echo_collection_id[]' and p[1] in asf_frame_collections - for p in cmr_opts - ]) +def should_use_asf_frame(cmr_opts): + asf_frame_platforms = ["SENTINEL-1A", "SENTINEL-1B", "ALOS"] + + asf_frame_collections = get_concept_id_alias( + asf_frame_platforms, collections_per_platform + ) + + return any( + [ + p[0] == "platform[]" + and p[1].upper() in asf_frame_platforms + or p[0] == "echo_collection_id[]" + and p[1] in asf_frame_collections + for p in cmr_opts + ] + ) def use_asf_frame(cmr_opts): @@ -118,41 +128,40 @@ def use_asf_frame(cmr_opts): if not isinstance(p[1], str): continue - m = re.search(r'CENTER_ESA_FRAME', p[1]) + m = re.search(r"CENTER_ESA_FRAME", p[1]) if m is None: continue - logging.debug( - 'Sentinel/ALOS subquery, using ASF frame instead of ESA frame' - ) + logging.debug("Sentinel/ALOS subquery, using ASF frame instead of ESA frame") + + cmr_opts[n] = (p[0], p[1].replace(",CENTER_ESA_FRAME,", ",FRAME_NUMBER,")) - cmr_opts[n] = ( - p[0], - p[1].replace(',CENTER_ESA_FRAME,', ',FRAME_NUMBER,') - ) - return cmr_opts + # some products don't have integer values in BYTES fields, round to nearest int def try_round_float(value: str) -> Optional[int]: if value is None: return None - + value = float(value) return round(value) + def try_parse_int(value: str) -> Optional[int]: if value is None: return None - + return int(value) + def try_parse_float(value: str) -> Optional[float]: if value is None: return None - + return float(value) + def try_parse_date(value: str) -> Optional[str]: if value is None: return None @@ -161,7 +170,7 @@ def try_parse_date(value: str) -> Optional[str]: date = ciso8601.parse_datetime(value) except ValueError: return None - + if date is None: return value @@ -169,21 +178,34 @@ def try_parse_date(value: str) -> Optional[str]: date = date.replace(tzinfo=timezone.utc) # Turn all inputs into a consistant format: - return date.strftime('%Y-%m-%dT%H:%M:%SZ') + return date.strftime("%Y-%m-%dT%H:%M:%SZ") + def fix_date(fixed_params: Dict[str, Any]): - if 'start' in fixed_params or 'end' in fixed_params or 'season' in fixed_params: - fixed_params["start"] = fixed_params["start"] if "start" in fixed_params else "1978-01-01T00:00:00Z" - fixed_params["end"] = fixed_params["end"] if "end" in fixed_params else datetime.utcnow().isoformat() - fixed_params["season"] = ','.join(str(x) for x in fixed_params['season']) if "season" in fixed_params else "" + if "start" in fixed_params or "end" in fixed_params or "season" in fixed_params: + fixed_params["start"] = ( + fixed_params["start"] if "start" in 
fixed_params else "1978-01-01T00:00:00Z" + ) + fixed_params["end"] = ( + fixed_params["end"] + if "end" in fixed_params + else datetime.utcnow().isoformat() + ) + fixed_params["season"] = ( + ",".join(str(x) for x in fixed_params["season"]) + if "season" in fixed_params + else "" + ) - fixed_params['temporal'] = f'{fixed_params["start"]},{fixed_params["end"]},{fixed_params["season"]}' + fixed_params["temporal"] = ( + f'{fixed_params["start"]},{fixed_params["end"]},{fixed_params["season"]}' + ) # And a little cleanup - fixed_params.pop('start', None) - fixed_params.pop('end', None) - fixed_params.pop('season', None) - + fixed_params.pop("start", None) + fixed_params.pop("end", None) + fixed_params.pop("season", None) + return fixed_params @@ -195,28 +217,29 @@ def should_use_bbox(shape: BaseGeometry): """ if isinstance(shape, Polygon): coords = [ - [shape.bounds[0], shape.bounds[1]], + [shape.bounds[0], shape.bounds[1]], [shape.bounds[2], shape.bounds[1]], [shape.bounds[2], shape.bounds[3]], [shape.bounds[0], shape.bounds[3]], ] return shape.equals(Polygon(shell=coords)) - + return False def wkt_to_cmr_shape(shape: BaseGeometry): # take note of the WKT type - if shape.geom_type not in ["Point","LineString", "Polygon"]: - raise ValueError('Unsupported WKT: {0}.'.format(shape.wkt)) - + if shape.geom_type not in ["Point", "LineString", "Polygon"]: + raise ValueError("Unsupported WKT: {0}.".format(shape.wkt)) + if shape.geom_type == "Polygon": coords = shape.exterior.coords - else: # type == Point | Linestring + else: # type == Point | Linestring coords = shape.coords # Turn [[x,y],[x,y]] into [x,y,x,y]: lon_lat_sequence = [] - for lon_lat in coords: lon_lat_sequence.extend(lon_lat) + for lon_lat in coords: + lon_lat_sequence.extend(lon_lat) # Turn any "6e8" to a literal number. 
(As a sting): - coords = ['{:.16f}'.format(float(cord)) for cord in lon_lat_sequence] - return '{0}:{1}'.format(shape.geom_type.lower(), ','.join(coords)) + coords = ["{:.16f}".format(float(cord)) for cord in lon_lat_sequence] + return "{0}:{1}".format(shape.geom_type.lower(), ",".join(coords)) diff --git a/asf_search/Products/AIRSARProduct.py b/asf_search/Products/AIRSARProduct.py index 54c2c03c..6353ba06 100644 --- a/asf_search/Products/AIRSARProduct.py +++ b/asf_search/Products/AIRSARProduct.py @@ -1,25 +1,30 @@ -import copy from typing import Dict from asf_search import ASFSession, ASFProduct -from asf_search.CMR.translate import try_parse_float, try_parse_int +from asf_search.CMR.translate import try_parse_int + class AIRSARProduct(ASFProduct): """ ASF Dataset Overview Page: https://asf.alaska.edu/data-sets/sar-data-sets/airsar/ """ + _base_properties = { - 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, - 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, - 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, - 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, + "frameNumber": { + "path": ["AdditionalAttributes", ("Name", "CENTER_ESA_FRAME"), "Values", 0], + "cast": try_parse_int, + }, + "groupID": { + "path": ["AdditionalAttributes", ("Name", "GROUP_ID"), "Values", 0] + }, + "insarStackId": { + "path": ["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0] + }, + "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - + @staticmethod def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **AIRSARProduct._base_properties - } + return {**ASFProduct.get_property_paths(), **AIRSARProduct._base_properties} diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py index 9f31011b..e04e2cf8 100644 --- a/asf_search/Products/ALOSProduct.py +++ b/asf_search/Products/ALOSProduct.py @@ -1,5 +1,5 @@ from typing import Dict, Union -from asf_search import ASFSession, ASFProduct, ASFStackableProduct, ASFSearchOptions +from asf_search import ASFSession, ASFStackableProduct from asf_search.CMR.translate import try_parse_float, try_parse_int, try_round_float from asf_search.constants import PRODUCT_TYPE @@ -10,19 +10,34 @@ class ALOSProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/ """ + _base_properties = { - 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, - 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, - 'offNadirAngle': {'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0], 'cast': try_parse_float}, - 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, - 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, + "frameNumber": { + "path": ["AdditionalAttributes", ("Name", "FRAME_NUMBER"), "Values", 0], + "cast": try_parse_int, + }, + "faradayRotation": { + "path": ["AdditionalAttributes", ("Name", "FARADAY_ROTATION"), "Values", 0], + "cast": try_parse_float, + }, + "offNadirAngle": { + "path": ["AdditionalAttributes", ("Name", 
"OFF_NADIR_ANGLE"), "Values", 0], + "cast": try_parse_float, + }, + "bytes": { + "path": ["AdditionalAttributes", ("Name", "BYTES"), "Values", 0], + "cast": try_round_float, + }, + "insarStackId": { + "path": ["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0] + }, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - if self.properties.get('groupID') is None: - self.properties['groupID'] = self.properties['sceneName'] + if self.properties.get("groupID") is None: + self.properties["groupID"] = self.properties["sceneName"] @staticmethod def get_default_baseline_product_type() -> Union[str, None]: @@ -35,5 +50,5 @@ def get_default_baseline_product_type() -> Union[str, None]: def get_property_paths() -> Dict: return { **ASFStackableProduct.get_property_paths(), - **ALOSProduct._base_properties + **ALOSProduct._base_properties, } diff --git a/asf_search/Products/ARIAS1GUNWProduct.py b/asf_search/Products/ARIAS1GUNWProduct.py index ab477bfc..9bef2dd2 100644 --- a/asf_search/Products/ARIAS1GUNWProduct.py +++ b/asf_search/Products/ARIAS1GUNWProduct.py @@ -10,46 +10,61 @@ class ARIAS1GUNWProduct(S1Product): """ Used for ARIA S1 GUNW Products - ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/derived-data-sets/sentinel-1-interferograms/ + ASF Dataset Documentation Page: + https://asf.alaska.edu/data-sets/derived-data-sets/sentinel-1-interferograms/ """ + _base_properties = { - 'perpendicularBaseline': {'path': ['AdditionalAttributes', ('Name', 'PERPENDICULAR_BASELINE'), 'Values', 0], 'cast': try_parse_float}, - 'orbit': {'path': ['OrbitCalculatedSpatialDomains']}, - 'inputGranules': {'path': ['InputGranules']}, - 'ariaVersion': {'path': ['AdditionalAttributes', ('Name', 'VERSION'), 'Values', 0]} + "perpendicularBaseline": { + "path": [ + "AdditionalAttributes", + ("Name", "PERPENDICULAR_BASELINE"), + "Values", + 0, + ], + "cast": try_parse_float, + }, + "orbit": {"path": ["OrbitCalculatedSpatialDomains"]}, + "inputGranules": {"path": ["InputGranules"]}, + "ariaVersion": { + "path": ["AdditionalAttributes", ("Name", "VERSION"), "Values", 0] + }, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - self.properties['orbit'] = [orbit['OrbitNumber'] for orbit in self.properties['orbit']] + self.properties["orbit"] = [ + orbit["OrbitNumber"] for orbit in self.properties["orbit"] + ] + + urls = self.umm_get( + self.umm, "RelatedUrls", ("Type", [("USE SERVICE API", "URL")]), 0 + ) - urls = self.umm_get(self.umm, 'RelatedUrls', ('Type', [('USE SERVICE API', 'URL')]), 0) - - self.properties['additionalUrls'] = [] + self.properties["additionalUrls"] = [] if urls is not None: - self.properties['url'] = urls[0] - self.properties['fileName'] = self.properties['fileID'] + '.' + urls[0].split('.')[-1] - self.properties['additionalUrls'] = urls[1:] + self.properties["url"] = urls[0] + self.properties["fileName"] = ( + self.properties["fileID"] + "." 
+ urls[0].split(".")[-1] + ) + self.properties["additionalUrls"] = urls[1:] @staticmethod def get_property_paths() -> Dict: - return { - **S1Product.get_property_paths(), - **ARIAS1GUNWProduct._base_properties - } + return {**S1Product.get_property_paths(), **ARIAS1GUNWProduct._base_properties} def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: """ Build search options that can be used to find an insar stack for this product - :return: ASFSearchOptions describing appropriate options for building a stack from this product + :return: ASFSearchOptions describing appropriate options + for building a stack from this product """ return None - def is_valid_reference(self): return False - + @staticmethod def get_default_baseline_product_type() -> None: """ @@ -59,9 +74,15 @@ def get_default_baseline_product_type() -> None: @staticmethod def _is_subclass(item: Dict) -> bool: - platform = ASFProduct.umm_get(item['umm'], 'Platforms', 0, 'ShortName') - if platform in ['SENTINEL-1A', 'SENTINEL-1B']: - asf_platform = ASFProduct.umm_get(item['umm'], 'AdditionalAttributes', ('Name', 'ASF_PLATFORM'), 'Values', 0) - return 'Sentinel-1 Interferogram' in asf_platform + platform = ASFProduct.umm_get(item["umm"], "Platforms", 0, "ShortName") + if platform in ["SENTINEL-1A", "SENTINEL-1B"]: + asf_platform = ASFProduct.umm_get( + item["umm"], + "AdditionalAttributes", + ("Name", "ASF_PLATFORM"), + "Values", + 0, + ) + return "Sentinel-1 Interferogram" in asf_platform return False diff --git a/asf_search/Products/ERSProduct.py b/asf_search/Products/ERSProduct.py index a2dbff98..26a6bfd3 100644 --- a/asf_search/Products/ERSProduct.py +++ b/asf_search/Products/ERSProduct.py @@ -1,5 +1,5 @@ from typing import Dict, Union -from asf_search import ASFSearchOptions, ASFSession, ASFProduct, ASFStackableProduct +from asf_search import ASFSession, ASFStackableProduct from asf_search.CMR.translate import try_round_float from asf_search.constants import PRODUCT_TYPE @@ -11,13 +11,25 @@ class ERSProduct(ASFStackableProduct): ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/ ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/ """ + _base_properties = { - 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]}, - 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, - 'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0]}, - 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, - 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, - 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, + "frameNumber": { + "path": ["AdditionalAttributes", ("Name", "FRAME_NUMBER"), "Values", 0] + }, + "bytes": { + "path": ["AdditionalAttributes", ("Name", "BYTES"), "Values", 0], + "cast": try_round_float, + }, + "esaFrame": { + "path": ["AdditionalAttributes", ("Name", "CENTER_ESA_FRAME"), "Values", 0] + }, + "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, + "beamModeType": { + "path": ["AdditionalAttributes", ("Name", "BEAM_MODE_TYPE"), "Values", 0] + }, + "insarStackId": { + "path": ["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0] + }, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @@ -27,7 +39,7 @@ def __init__(self, args: Dict = {}, session: ASFSession = 
ASFSession()): def get_property_paths() -> Dict: return { **ASFStackableProduct.get_property_paths(), - **ERSProduct._base_properties + **ERSProduct._base_properties, } @staticmethod diff --git a/asf_search/Products/JERSProduct.py b/asf_search/Products/JERSProduct.py index 1963225f..cc282172 100644 --- a/asf_search/Products/JERSProduct.py +++ b/asf_search/Products/JERSProduct.py @@ -1,5 +1,5 @@ from typing import Dict, Union -from asf_search import ASFSearchOptions, ASFSession, ASFProduct, ASFStackableProduct +from asf_search import ASFSession, ASFStackableProduct from asf_search.constants import PRODUCT_TYPE @@ -7,12 +7,21 @@ class JERSProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/jers-1/ """ + _base_properties = { - 'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]}, - 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, - 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, - 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, - 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, + "browse": { + "path": ["RelatedUrls", ("Type", [("GET RELATED VISUALIZATION", "URL")])] + }, + "groupID": { + "path": ["AdditionalAttributes", ("Name", "GROUP_ID"), "Values", 0] + }, + "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, + "beamModeType": { + "path": ["AdditionalAttributes", ("Name", "BEAM_MODE_TYPE"), "Values", 0] + }, + "insarStackId": { + "path": ["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0] + }, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @@ -29,5 +38,5 @@ def get_default_baseline_product_type() -> Union[str, None]: def get_property_paths() -> Dict: return { **ASFStackableProduct.get_property_paths(), - **JERSProduct._base_properties + **JERSProduct._base_properties, } diff --git a/asf_search/Products/NISARProduct.py b/asf_search/Products/NISARProduct.py index 819e1eb8..463ca518 100644 --- a/asf_search/Products/NISARProduct.py +++ b/asf_search/Products/NISARProduct.py @@ -1,7 +1,5 @@ from typing import Dict, Tuple, Union from asf_search import ASFSearchOptions, ASFSession, ASFStackableProduct -from asf_search.CMR.translate import try_parse_float, try_parse_int, try_round_float -from asf_search.constants import PRODUCT_TYPE class NISARProduct(ASFStackableProduct): @@ -10,18 +8,17 @@ class NISARProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/ """ - _base_properties = { - 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']} - } + + _base_properties = {"pgeVersion": {"path": ["PGEVersionClass", "PGEVersion"]}} def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - self.properties['additionalUrls'] = self._get_additional_urls() - self.properties['s3Urls'] = self._get_s3_urls() + self.properties["additionalUrls"] = self._get_additional_urls() + self.properties["s3Urls"] = self._get_s3_urls() - if self.properties.get('groupID') is None: - self.properties['groupID'] = self.properties['sceneName'] + if self.properties.get("groupID") is None: + self.properties["groupID"] = self.properties["sceneName"] @staticmethod def get_default_baseline_product_type() -> Union[str, None]: @@ -32,26 +29,27 @@ def get_default_baseline_product_type() -> Union[str, None]: def is_valid_reference(self): return False - + 
def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: """ Build search options that can be used to find an insar stack for this product - :return: ASFSearchOptions describing appropriate options for building a stack from this product + :return: ASFSearchOptions describing appropriate options + for building a stack from this product """ return None - + @staticmethod def get_property_paths() -> Dict: return { **ASFStackableProduct.get_property_paths(), - **NISARProduct._base_properties + **NISARProduct._base_properties, } def get_sort_keys(self) -> Tuple[str, str]: keys = super().get_sort_keys() - - if keys[0] == '': - return (self._read_property('processingDate', ''), keys[1]) + + if keys[0] == "": + return (self._read_property("processingDate", ""), keys[1]) return keys diff --git a/asf_search/Products/OPERAS1Product.py b/asf_search/Products/OPERAS1Product.py index 67055875..f6fa4aaa 100644 --- a/asf_search/Products/OPERAS1Product.py +++ b/asf_search/Products/OPERAS1Product.py @@ -8,53 +8,102 @@ class OPERAS1Product(S1Product): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/opera/ """ + _base_properties = { - 'centerLat': {'path': []}, # Opera products lacks these fields - 'centerLon': {'path': []}, - 'frameNumber': {'path': []}, - 'operaBurstID': {'path': ['AdditionalAttributes', ('Name', 'OPERA_BURST_ID'), 'Values', 0]}, - 'validityStartDate': {'path': ['TemporalExtent', 'SingleDateTime'], 'cast': try_parse_date}, - 'bytes': {'path': ['DataGranule', 'ArchiveAndDistributionInformation']}, - 'subswath': {'path': ['AdditionalAttributes', ('Name', 'SUBSWATH_NAME'), 'Values', 0]}, - 'polarization': {'path': ['AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values']} # dual polarization is in list rather than a 'VV+VH' style format + "centerLat": {"path": []}, # Opera products lacks these fields + "centerLon": {"path": []}, + "frameNumber": {"path": []}, + "operaBurstID": { + "path": ["AdditionalAttributes", ("Name", "OPERA_BURST_ID"), "Values", 0] + }, + "validityStartDate": { + "path": ["TemporalExtent", "SingleDateTime"], + "cast": try_parse_date, + }, + "bytes": {"path": ["DataGranule", "ArchiveAndDistributionInformation"]}, + "subswath": { + "path": ["AdditionalAttributes", ("Name", "SUBSWATH_NAME"), "Values", 0] + }, + "polarization": { + "path": ["AdditionalAttributes", ("Name", "POLARIZATION"), "Values"] + }, # dual polarization is in list rather than a 'VV+VH' style format } - _subclass_concept_ids = { 'C1257995185-ASF', 'C1257995186-ASF', 'C1258354200-ASF', 'C1258354201-ASF', 'C1259974840-ASF', 'C1259976861-ASF', 'C1259981910-ASF', 'C1259982010-ASF', 'C2777436413-ASF', 'C2777443834-ASF', 'C2795135174-ASF', 'C2795135668-ASF','C1260721853-ASF', 'C1260721945-ASF', 'C2803501097-ASF', 'C2803501758-ASF' } + _subclass_concept_ids = { + "C1257995185-ASF", + "C1257995186-ASF", + "C1258354200-ASF", + "C1258354201-ASF", + "C1259974840-ASF", + "C1259976861-ASF", + "C1259981910-ASF", + "C1259982010-ASF", + "C2777436413-ASF", + "C2777443834-ASF", + "C2795135174-ASF", + "C2795135668-ASF", + "C1260721853-ASF", + "C1260721945-ASF", + "C2803501097-ASF", + "C2803501758-ASF", + } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) self.baseline = None - self.properties['beamMode'] = self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'BEAM_MODE'), 'Values', 0) + self.properties["beamMode"] = self.umm_get( + self.umm, "AdditionalAttributes", ("Name", "BEAM_MODE"), "Values", 0 + ) - 
self.properties['additionalUrls'] = self._get_additional_urls() + self.properties["additionalUrls"] = self._get_additional_urls() - self.properties['operaBurstID'] = self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'OPERA_BURST_ID'), 'Values', 0) - self.properties['bytes'] = {entry['Name']: {'bytes': entry['SizeInBytes'], 'format': entry['Format']} for entry in self.properties['bytes']} + self.properties["operaBurstID"] = self.umm_get( + self.umm, "AdditionalAttributes", ("Name", "OPERA_BURST_ID"), "Values", 0 + ) + self.properties["bytes"] = { + entry["Name"]: {"bytes": entry["SizeInBytes"], "format": entry["Format"]} + for entry in self.properties["bytes"] + } center = self.centroid() - self.properties['centerLat'] = center.y - self.properties['centerLon'] = center.x - - self.properties.pop('frameNumber') - - if (processingLevel := self.properties['processingLevel']) in ['RTC', 'RTC-STATIC']: - self.properties['bistaticDelayCorrection'] = self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'BISTATIC_DELAY_CORRECTION'), 'Values', 0) - if processingLevel == 'RTC': - self.properties['noiseCorrection'] = self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'NOISE_CORRECTION'), 'Values', 0) - self.properties['postProcessingFilter'] = self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'POST_PROCESSING_FILTER'), 'Values', 0) - - def get_stack_opts(self, opts: ASFSearchOptions = ASFSearchOptions()) -> ASFSearchOptions: - return opts + self.properties["centerLat"] = center.y + self.properties["centerLon"] = center.x + + self.properties.pop("frameNumber") + + if (processingLevel := self.properties["processingLevel"]) in [ + "RTC", + "RTC-STATIC", + ]: + self.properties["bistaticDelayCorrection"] = self.umm_get( + self.umm, + "AdditionalAttributes", + ("Name", "BISTATIC_DELAY_CORRECTION"), + "Values", + 0, + ) + if processingLevel == "RTC": + self.properties["noiseCorrection"] = self.umm_get( + self.umm, + "AdditionalAttributes", + ("Name", "NOISE_CORRECTION"), + "Values", + 0, + ) + self.properties["postProcessingFilter"] = self.umm_get( + self.umm, + "AdditionalAttributes", + ("Name", "POST_PROCESSING_FILTER"), + "Values", + 0, + ) @staticmethod def get_property_paths() -> Dict: - return { - **S1Product.get_property_paths(), - **OPERAS1Product._base_properties - } - + return {**S1Product.get_property_paths(), **OPERAS1Product._base_properties} + @staticmethod def get_default_baseline_product_type() -> None: """ @@ -69,21 +118,22 @@ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: """ Build search options that can be used to find an insar stack for this product - :return: ASFSearchOptions describing appropriate options for building a stack from this product + :return: ASFSearchOptions describing appropriate options + for building a stack from this product """ return None def get_sort_keys(self) -> Tuple[str, str]: keys = super().get_sort_keys() - if keys[0] == '': - return (self._read_property('validityStartDate', ''), keys[1]) + if keys[0] == "": + return (self._read_property("validityStartDate", ""), keys[1]) return keys @staticmethod def _is_subclass(item: Dict) -> bool: - # not all umm products have this field set, + # not all umm products have this field set, # but when it's available it's convenient for fast matching - concept_id = item['meta'].get('collection-concept-id') + concept_id = item["meta"].get("collection-concept-id") return concept_id in OPERAS1Product._subclass_concept_ids diff --git a/asf_search/Products/RADARSATProduct.py 
b/asf_search/Products/RADARSATProduct.py index 7db7f1b2..aca090e8 100644 --- a/asf_search/Products/RADARSATProduct.py +++ b/asf_search/Products/RADARSATProduct.py @@ -1,5 +1,5 @@ from typing import Dict, Union -from asf_search import ASFSearchOptions, ASFSession, ASFProduct, ASFStackableProduct +from asf_search import ASFSession, ASFStackableProduct from asf_search.CMR.translate import try_parse_float from asf_search.constants import PRODUCT_TYPE @@ -8,11 +8,19 @@ class RADARSATProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/radarsat-1/ """ + _base_properties = { - 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, - 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, - 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, - 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, + "faradayRotation": { + "path": ["AdditionalAttributes", ("Name", "FARADAY_ROTATION"), "Values", 0], + "cast": try_parse_float, + }, + "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, + "beamModeType": { + "path": ["AdditionalAttributes", ("Name", "BEAM_MODE_TYPE"), "Values", 0] + }, + "insarStackId": { + "path": ["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0] + }, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @@ -22,7 +30,7 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): def get_property_paths() -> Dict: return { **ASFStackableProduct.get_property_paths(), - **RADARSATProduct._base_properties + **RADARSATProduct._base_properties, } @staticmethod diff --git a/asf_search/Products/S1BurstProduct.py b/asf_search/Products/S1BurstProduct.py index f4f7a249..fdca6132 100644 --- a/asf_search/Products/S1BurstProduct.py +++ b/asf_search/Products/S1BurstProduct.py @@ -6,86 +6,129 @@ from asf_search.CMR.translate import try_parse_int from asf_search.constants import PRODUCT_TYPE + class S1BurstProduct(S1Product): """ S1Product Subclass made specifically for Sentinel-1 SLC-BURST products - + Key features/properties: - - `properties['burst']` contains SLC-BURST Specific fields such as `fullBurstID` and `burstIndex` + - `properties['burst']` contains SLC-BURST Specific fields + such as `fullBurstID` and `burstIndex` - `properties['additionalUrls']` contains BURST-XML url - SLC-BURST specific stacking params - ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/ + ASF Dataset Documentation Page: + https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/ """ + _base_properties = { - 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]}, - 'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int}, - 'relativeBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_RELATIVE'), 'Values', 0], 'cast': try_parse_int}, - 'fullBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_FULL'), 'Values', 0]}, - 'burstIndex': {'path': ['AdditionalAttributes', ('Name', 'BURST_INDEX'), 'Values', 0], 'cast': try_parse_int}, - 'samplesPerBurst': {'path': ['AdditionalAttributes', ('Name', 'SAMPLES_PER_BURST'), 'Values', 0], 'cast': try_parse_int}, - 'subswath': {'path': ['AdditionalAttributes', ('Name', 'SUBSWATH_NAME'), 'Values', 0]}, - 
'azimuthTime': {'path': ['AdditionalAttributes', ('Name', 'AZIMUTH_TIME'), 'Values', 0], 'cast': try_parse_date}, - 'azimuthAnxTime': {'path': ['AdditionalAttributes', ('Name', 'AZIMUTH_ANX_TIME'), 'Values', 0]}, + "bytes": { + "path": ["AdditionalAttributes", ("Name", "BYTE_LENGTH"), "Values", 0] + }, + "absoluteBurstID": { + "path": [ + "AdditionalAttributes", + ("Name", "BURST_ID_ABSOLUTE"), + "Values", + 0, + ], + "cast": try_parse_int, + }, + "relativeBurstID": { + "path": [ + "AdditionalAttributes", + ("Name", "BURST_ID_RELATIVE"), + "Values", + 0, + ], + "cast": try_parse_int, + }, + "fullBurstID": { + "path": ["AdditionalAttributes", ("Name", "BURST_ID_FULL"), "Values", 0] + }, + "burstIndex": { + "path": ["AdditionalAttributes", ("Name", "BURST_INDEX"), "Values", 0], + "cast": try_parse_int, + }, + "samplesPerBurst": { + "path": [ + "AdditionalAttributes", + ("Name", "SAMPLES_PER_BURST"), + "Values", + 0, + ], + "cast": try_parse_int, + }, + "subswath": { + "path": ["AdditionalAttributes", ("Name", "SUBSWATH_NAME"), "Values", 0] + }, + "azimuthTime": { + "path": ["AdditionalAttributes", ("Name", "AZIMUTH_TIME"), "Values", 0], + "cast": try_parse_date, + }, + "azimuthAnxTime": { + "path": ["AdditionalAttributes", ("Name", "AZIMUTH_ANX_TIME"), "Values", 0] + }, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - self.properties['sceneName'] = self.properties['fileID'] + self.properties["sceneName"] = self.properties["fileID"] - # Gathers burst properties into `burst` specific dict + # Gathers burst properties into `burst` specific dict # rather than properties dict to limit breaking changes - self.properties['burst'] = { - 'absoluteBurstID': self.properties.pop('absoluteBurstID'), - 'relativeBurstID': self.properties.pop('relativeBurstID'), - 'fullBurstID': self.properties.pop('fullBurstID'), - 'burstIndex': self.properties.pop('burstIndex'), - 'samplesPerBurst': self.properties.pop('samplesPerBurst'), - 'subswath': self.properties.pop('subswath'), - 'azimuthTime': self.properties.pop('azimuthTime'), - 'azimuthAnxTime': self.properties.pop('azimuthAnxTime') + self.properties["burst"] = { + "absoluteBurstID": self.properties.pop("absoluteBurstID"), + "relativeBurstID": self.properties.pop("relativeBurstID"), + "fullBurstID": self.properties.pop("fullBurstID"), + "burstIndex": self.properties.pop("burstIndex"), + "samplesPerBurst": self.properties.pop("samplesPerBurst"), + "subswath": self.properties.pop("subswath"), + "azimuthTime": self.properties.pop("azimuthTime"), + "azimuthAnxTime": self.properties.pop("azimuthAnxTime"), } - urls = self.umm_get(self.umm, 'RelatedUrls', ('Type', [('USE SERVICE API', 'URL')]), 0) + urls = self.umm_get( + self.umm, "RelatedUrls", ("Type", [("USE SERVICE API", "URL")]), 0 + ) if urls is not None: - self.properties['url'] = urls[0] - self.properties['fileName'] = self.properties['fileID'] + '.' + urls[0].split('.')[-1] - self.properties['additionalUrls'] = [urls[1]] # xml-metadata url + self.properties["url"] = urls[0] + self.properties["fileName"] = ( + self.properties["fileID"] + "." 
+ urls[0].split(".")[-1] + ) + self.properties["additionalUrls"] = [urls[1]] # xml-metadata url def get_stack_opts(self, opts: ASFSearchOptions = None): """ - Returns the search options asf-search will use internally to build an SLC-BURST baseline stack from - - :param opts: additional criteria for limiting + Returns the search options asf-search will use internally + to build an SLC-BURST baseline stack from + + :param opts: additional criteria for limiting :returns ASFSearchOptions used for build Sentinel-1 SLC-BURST Stack """ - stack_opts = (ASFSearchOptions() if opts is None else copy(opts)) - + stack_opts = ASFSearchOptions() if opts is None else copy(opts) + stack_opts.processingLevel = self.get_default_baseline_product_type() - stack_opts.fullBurstID = self.properties['burst']['fullBurstID'] - stack_opts.polarization = [self.properties['polarization']] + stack_opts.fullBurstID = self.properties["burst"]["fullBurstID"] + stack_opts.polarization = [self.properties["polarization"]] return stack_opts - + @staticmethod def get_property_paths() -> Dict: - return { - **S1Product.get_property_paths(), - **S1BurstProduct._base_properties - } - + return {**S1Product.get_property_paths(), **S1BurstProduct._base_properties} + def _get_additional_filenames_and_urls(self, default_filename: str = None): # Burst XML filenames are just numbers, this makes it more indentifiable if default_filename is None: - default_filename = self.properties['fileName'] - + default_filename = self.properties["fileName"] + file_name = f"{'.'.join(default_filename.split('.')[:-1])}.xml" - - return [(file_name, self.properties['additionalUrls'][0])] - + + return [(file_name, self.properties["additionalUrls"][0])] + @staticmethod def get_default_baseline_product_type() -> Union[str, None]: """ Returns the product type to search for when building a baseline stack. 
""" return PRODUCT_TYPE.BURST - \ No newline at end of file diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py index 341b1fd2..987a074b 100644 --- a/asf_search/Products/S1Product.py +++ b/asf_search/Products/S1Product.py @@ -16,10 +16,15 @@ class S1Product(ASFStackableProduct): """ _base_properties = { - 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME) - 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, - 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, - 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']}, + "frameNumber": { + "path": ["AdditionalAttributes", ("Name", "FRAME_NUMBER"), "Values", 0], + "cast": try_parse_int, + }, # Sentinel and ALOS product alt for frameNumber (ESA_FRAME) + "groupID": { + "path": ["AdditionalAttributes", ("Name", "GROUP_ID"), "Values", 0] + }, + "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, + "pgeVersion": {"path": ["PGEVersionClass", "PGEVersion"]}, } """ S1 Specific path override @@ -31,8 +36,8 @@ class S1Product(ASFStackableProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - self.properties['s3Urls'] = self._get_s3_urls() - + self.properties["s3Urls"] = self._get_s3_urls() + if self._has_baseline(): self.baseline = self.get_baseline_calc_properties() @@ -40,8 +45,8 @@ def _has_baseline(self) -> bool: baseline = self.get_baseline_calc_properties() return ( - baseline is not None and - None not in baseline['stateVectors']['positions'].values() + baseline is not None + and None not in baseline["stateVectors"]["positions"].values() ) def get_baseline_calc_properties(self) -> Dict: @@ -50,12 +55,14 @@ def get_baseline_calc_properties(self) -> Dict: """ ascendingNodeTime = self.umm_cast( self._parse_timestamp, - self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'ASC_NODE_TIME'), 'Values', 0) + self.umm_get( + self.umm, "AdditionalAttributes", ("Name", "ASC_NODE_TIME"), "Values", 0 + ), ) return { - 'stateVectors': self.get_state_vectors(), - 'ascendingNodeTime': ascendingNodeTime + "stateVectors": self.get_state_vectors(), + "ascendingNodeTime": ascendingNodeTime, } def get_state_vectors(self) -> Dict: @@ -66,20 +73,33 @@ def get_state_vectors(self) -> Dict: positions = {} velocities = {} - sv_pre_position = self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'SV_POSITION_PRE'), 'Values', 0) - sv_post_position = self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'SV_POSITION_POST'), 'Values', 0) - sv_pre_velocity = self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'SV_VELOCITY_PRE'), 'Values', 0) - sv_post_velocity = self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'SV_VELOCITY_POST'), 'Values', 0) + sv_pre_position = self.umm_get( + self.umm, "AdditionalAttributes", ("Name", "SV_POSITION_PRE"), "Values", 0 + ) + sv_post_position = self.umm_get( + self.umm, "AdditionalAttributes", ("Name", "SV_POSITION_POST"), "Values", 0 + ) + sv_pre_velocity = self.umm_get( + self.umm, "AdditionalAttributes", ("Name", "SV_VELOCITY_PRE"), "Values", 0 + ) + sv_post_velocity = self.umm_get( + self.umm, "AdditionalAttributes", ("Name", "SV_VELOCITY_POST"), "Values", 0 + ) - positions['prePosition'], positions['prePositionTime'] = self.umm_cast(self._parse_state_vector, sv_pre_position) - positions['postPosition'], 
positions['postPositionTime'] = self.umm_cast(self._parse_state_vector, sv_post_position)
-        velocities['preVelocity'], velocities['preVelocityTime'] = self.umm_cast(self._parse_state_vector, sv_pre_velocity)
-        velocities['postVelocity'], velocities['postVelocityTime'] = self.umm_cast(self._parse_state_vector, sv_post_velocity)
+        positions["prePosition"], positions["prePositionTime"] = self.umm_cast(
+            self._parse_state_vector, sv_pre_position
+        )
+        positions["postPosition"], positions["postPositionTime"] = self.umm_cast(
+            self._parse_state_vector, sv_post_position
+        )
+        velocities["preVelocity"], velocities["preVelocityTime"] = self.umm_cast(
+            self._parse_state_vector, sv_pre_velocity
+        )
+        velocities["postVelocity"], velocities["postVelocityTime"] = self.umm_cast(
+            self._parse_state_vector, sv_post_velocity
+        )
 
-        return {
-            'positions': positions,
-            'velocities': velocities
-        }
+        return {"positions": positions, "velocities": velocities}
 
     def _parse_timestamp(self, timestamp: str) -> Optional[str]:
         if timestamp is None:
@@ -87,34 +107,37 @@ def _parse_timestamp(self, timestamp: str) -> Optional[str]:
 
         return try_parse_date(timestamp)
 
-    def _parse_state_vector(self, state_vector: str) -> Tuple[Optional[List], Optional[str]]:
+    def _parse_state_vector(
+        self, state_vector: str
+    ) -> Tuple[Optional[List], Optional[str]]:
        if state_vector is None:
             return None, None
 
-        velocity = [float(val) for val in state_vector.split(',')[:3]]
-        timestamp = self._parse_timestamp(state_vector.split(',')[-1])
+        velocity = [float(val) for val in state_vector.split(",")[:3]]
+        timestamp = self._parse_timestamp(state_vector.split(",")[-1])
 
         return velocity, timestamp
 
     def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions:
         """
-        Returns the search options asf-search will use internally to build an SLC baseline stack from
+        Returns the search options asf-search will use internally
+        to build an SLC baseline stack from this product
 
         :param opts: additional criteria for limiting
         :returns ASFSearchOptions used for building a Sentinel-1 SLC stack
         """
-        stack_opts = (ASFSearchOptions() if opts is None else copy(opts))
+        stack_opts = ASFSearchOptions() if opts is None else copy(opts)
 
         stack_opts.processingLevel = self.get_default_baseline_product_type()
-        stack_opts.beamMode = [self.properties['beamModeType']]
-        stack_opts.flightDirection = self.properties['flightDirection']
-        stack_opts.relativeOrbit = [int(self.properties['pathNumber'])]  # path
+        stack_opts.beamMode = [self.properties["beamModeType"]]
+        stack_opts.flightDirection = self.properties["flightDirection"]
+        stack_opts.relativeOrbit = [int(self.properties["pathNumber"])]  # path
        stack_opts.platform = [PLATFORM.SENTINEL1A, PLATFORM.SENTINEL1B]
 
-        if self.properties['polarization'] in ['HH', 'HH+HV']:
-            stack_opts.polarization = ['HH', 'HH+HV']
+        if self.properties["polarization"] in ["HH", "HH+HV"]:
+            stack_opts.polarization = ["HH", "HH+HV"]
         else:
-            stack_opts.polarization = ['VV', 'VV+VH']
+            stack_opts.polarization = ["VV", "VV+VH"]
 
         stack_opts.intersectsWith = self.centroid().wkt
 
@@ -124,18 +147,18 @@ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions:
     def get_property_paths() -> Dict:
         return {
             **ASFStackableProduct.get_property_paths(),
-            **S1Product._base_properties
+            **S1Product._base_properties,
         }
 
     def is_valid_reference(self) -> bool:
-        keys = ['postPosition', 'postPositionTime', 'prePosition', 'postPositionTime']
+        keys = ["postPosition", "postPositionTime", "prePosition", "prePositionTime"]
 
         for key in keys:
-            if 
self.baseline['stateVectors']['positions'].get(key) is None: + if self.baseline["stateVectors"]["positions"].get(key) is None: return False return True - + @staticmethod def get_default_baseline_product_type() -> str: """ diff --git a/asf_search/Products/SEASATProduct.py b/asf_search/Products/SEASATProduct.py index e726d756..7d4063fe 100644 --- a/asf_search/Products/SEASATProduct.py +++ b/asf_search/Products/SEASATProduct.py @@ -1,16 +1,22 @@ from typing import Dict from asf_search import ASFSession, ASFProduct -from asf_search.CMR.translate import try_parse_float, try_round_float +from asf_search.CMR.translate import try_round_float class SEASATProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/seasat/ """ + _base_properties = { - 'bytes': {'path': [ 'AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, - 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, - 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, + "bytes": { + "path": ["AdditionalAttributes", ("Name", "BYTES"), "Values", 0], + "cast": try_round_float, + }, + "insarStackId": { + "path": ["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0] + }, + "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @@ -18,7 +24,4 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @staticmethod def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **SEASATProduct._base_properties - } + return {**ASFProduct.get_property_paths(), **SEASATProduct._base_properties} diff --git a/asf_search/Products/SIRCProduct.py b/asf_search/Products/SIRCProduct.py index e5e9ad31..05fa95a5 100644 --- a/asf_search/Products/SIRCProduct.py +++ b/asf_search/Products/SIRCProduct.py @@ -1,15 +1,21 @@ from typing import Dict from asf_search import ASFProduct, ASFSession + class SIRCProduct(ASFProduct): """ Dataset Documentation Page: https://eospso.nasa.gov/missions/spaceborne-imaging-radar-c """ + _base_properties = { - 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, - 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, - 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion'] }, - 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, + "groupID": { + "path": ["AdditionalAttributes", ("Name", "GROUP_ID"), "Values", 0] + }, + "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, + "pgeVersion": {"path": ["PGEVersionClass", "PGEVersion"]}, + "beamModeType": { + "path": ["AdditionalAttributes", ("Name", "BEAM_MODE_TYPE"), "Values", 0] + }, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @@ -17,7 +23,4 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @staticmethod def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **SIRCProduct._base_properties - } + return {**ASFProduct.get_property_paths(), **SIRCProduct._base_properties} diff --git a/asf_search/Products/SMAPProduct.py b/asf_search/Products/SMAPProduct.py index f78f00e0..8c04b232 100644 --- a/asf_search/Products/SMAPProduct.py +++ b/asf_search/Products/SMAPProduct.py @@ -1,16 +1,21 @@ -import copy from typing import Dict from asf_search import ASFProduct, ASFSession -from asf_search.CMR.translate 
import try_parse_float + class SMAPProduct(ASFProduct): """ - ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/ + ASF Dataset Documentation Page: + https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/ """ + _base_properties = { - 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, - 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, - 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, + "groupID": { + "path": ["AdditionalAttributes", ("Name", "GROUP_ID"), "Values", 0] + }, + "insarStackId": { + "path": ["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0] + }, + "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @@ -18,7 +23,4 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @staticmethod def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **SMAPProduct._base_properties - } + return {**ASFProduct.get_property_paths(), **SMAPProduct._base_properties} diff --git a/asf_search/Products/UAVSARProduct.py b/asf_search/Products/UAVSARProduct.py index 73acd812..782e74eb 100644 --- a/asf_search/Products/UAVSARProduct.py +++ b/asf_search/Products/UAVSARProduct.py @@ -1,16 +1,20 @@ -import copy from typing import Dict from asf_search import ASFProduct, ASFSession -from asf_search.CMR.translate import try_parse_float + class UAVSARProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/uavsar/ """ + _base_properties = { - 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, - 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, - 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, + "groupID": { + "path": ["AdditionalAttributes", ("Name", "GROUP_ID"), "Values", 0] + }, + "insarStackId": { + "path": ["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0] + }, + "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @@ -18,7 +22,4 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @staticmethod def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **UAVSARProduct._base_properties - } + return {**ASFProduct.get_property_paths(), **UAVSARProduct._base_properties} diff --git a/asf_search/Products/__init__.py b/asf_search/Products/__init__.py index 7317c7cd..37e22f2b 100644 --- a/asf_search/Products/__init__.py +++ b/asf_search/Products/__init__.py @@ -1,14 +1,14 @@ -from .S1Product import S1Product -from .ALOSProduct import ALOSProduct -from .RADARSATProduct import RADARSATProduct -from .AIRSARProduct import AIRSARProduct -from .ERSProduct import ERSProduct -from .JERSProduct import JERSProduct -from .UAVSARProduct import UAVSARProduct -from .SIRCProduct import SIRCProduct -from .SEASATProduct import SEASATProduct -from .SMAPProduct import SMAPProduct -from .S1BurstProduct import S1BurstProduct -from .OPERAS1Product import OPERAS1Product -from .ARIAS1GUNWProduct import ARIAS1GUNWProduct -from .NISARProduct import NISARProduct \ No newline at end of file +from .S1Product import S1Product # noqa: F401 +from .ALOSProduct import 
ALOSProduct # noqa: F401 +from .RADARSATProduct import RADARSATProduct # noqa: F401 +from .AIRSARProduct import AIRSARProduct # noqa: F401 +from .ERSProduct import ERSProduct # noqa: F401 +from .JERSProduct import JERSProduct # noqa: F401 +from .UAVSARProduct import UAVSARProduct # noqa: F401 +from .SIRCProduct import SIRCProduct # noqa: F401 +from .SEASATProduct import SEASATProduct # noqa: F401 +from .SMAPProduct import SMAPProduct # noqa: F401 +from .S1BurstProduct import S1BurstProduct # noqa: F401 +from .OPERAS1Product import OPERAS1Product # noqa: F401 +from .ARIAS1GUNWProduct import ARIAS1GUNWProduct # noqa: F401 +from .NISARProduct import NISARProduct # noqa: F401 diff --git a/asf_search/WKT/RepairEntry.py b/asf_search/WKT/RepairEntry.py index b0f5d6f2..8ce05eff 100644 --- a/asf_search/WKT/RepairEntry.py +++ b/asf_search/WKT/RepairEntry.py @@ -2,6 +2,6 @@ class RepairEntry: def __init__(self, report_type: str, report: str) -> None: self.report_type = report_type self.report = report - + def __str__(self) -> str: return f"{self.report_type}: {self.report}" diff --git a/asf_search/WKT/__init__.py b/asf_search/WKT/__init__.py index b3cb6ee8..208d1c7d 100644 --- a/asf_search/WKT/__init__.py +++ b/asf_search/WKT/__init__.py @@ -1,2 +1,2 @@ -from .validate_wkt import validate_wkt -from .RepairEntry import RepairEntry +from .validate_wkt import validate_wkt # noqa: F401 +from .RepairEntry import RepairEntry # noqa: F401 diff --git a/asf_search/WKT/validate_wkt.py b/asf_search/WKT/validate_wkt.py index a22911be..5f3adc1d 100644 --- a/asf_search/WKT/validate_wkt.py +++ b/asf_search/WKT/validate_wkt.py @@ -2,16 +2,23 @@ from typing import Union, Tuple, List from shapely import wkt from shapely.geometry.base import BaseGeometry -from shapely.geometry import Polygon, MultiPolygon, Point, LineString, GeometryCollection +from shapely.geometry import ( + Polygon, + MultiPolygon, + Point, + LineString, + GeometryCollection, +) from shapely.geometry.collection import BaseMultipartGeometry -from shapely.geometry.polygon import orient from shapely.ops import transform, orient, unary_union from .RepairEntry import RepairEntry from asf_search.exceptions import ASFWKTError -def validate_wkt(aoi: Union[str, BaseGeometry]) -> Tuple[BaseGeometry, BaseGeometry, List[RepairEntry]]: +def validate_wkt( + aoi: Union[str, BaseGeometry], +) -> Tuple[BaseGeometry, BaseGeometry, List[RepairEntry]]: """ Param aoi: the WKT string or Shapely Geometry to validate and prepare for the CMR query Validates the given area of interest, and returns a validated and simplified WKT string @@ -28,36 +35,43 @@ def validate_wkt(aoi: Union[str, BaseGeometry]) -> Tuple[BaseGeometry, BaseGeome if not aoi_shape.is_valid and not isinstance(aoi_shape, MultiPolygon): if isinstance(aoi_shape, Polygon): if not aoi_shape.exterior.is_simple: - raise ASFWKTError(f'WKT string: \"{aoi_shape.wkt}\" is a self intersecting polygon') + raise ASFWKTError( + f'WKT string: "{aoi_shape.wkt}" is a self intersecting polygon' + ) - raise ASFWKTError(f'WKT string: \"{aoi_shape.wkt}\" is not a valid WKT string') + raise ASFWKTError( + f'WKT string: "{aoi_shape.wkt}" is not a valid WKT string' + ) if aoi_shape.is_empty: - raise ASFWKTError(f'WKT string: \"{aoi_shape.wkt}\" empty WKT is not a valid AOI') - + raise ASFWKTError(f'WKT string: "{aoi_shape.wkt}" empty WKT is not a valid AOI') + wrapped, unwrapped, reports = _simplify_geometry(aoi_shape) - - return wrapped, unwrapped, [report for report in reports if report != None] + + return wrapped, unwrapped, 
[report for report in reports if report is not None]
 
 
 def _search_wkt_prep(shape: BaseGeometry):
-    if isinstance(shape, MultiPolygon) :
+    if isinstance(shape, MultiPolygon):
         output = []
         for geom in shape.geoms:
             output.append(orient(Polygon(geom.exterior)))
 
         return MultiPolygon(output)
-    
-    
+
     if isinstance(shape, Polygon):
         return orient(Polygon(shape.exterior), sign=1.0)
 
-def _simplify_geometry(geometry: BaseGeometry) -> Tuple[BaseGeometry, BaseGeometry, List[RepairEntry]]:
+
+def _simplify_geometry(
+    geometry: BaseGeometry,
+) -> Tuple[BaseGeometry, BaseGeometry, List[RepairEntry]]:
     """
-    param geometry: AOI Shapely Geometry to be prepped for CMR 
+    param geometry: AOI Shapely Geometry to be prepped for CMR
     prepares geometry for CMR by:
         1. Flattening any nested multi-part geometry into a single collection
-        2. clamping latitude +/-90, unwrapping longitude +/-180, removing coordinate dimensions higher than 2 (lon,lat)
+        2. clamping latitude +/-90, unwrapping longitude +/-180,
+            removing coordinate dimensions higher than 2 (lon,lat)
         3. Merging any overlapping shapes
         4. convex-hulling the remainder into a single shape
         5. simplifying until the shape has <= 300 points, with no point closer than 0.00001
 
     returns: geometry prepped for CMR
     """
     flattened = _flatten_multipart_geometry(geometry)
-    
+
     merged, merge_report = _merge_overlapping_geometry(flattened)
     convex, convex_report = _get_convex_hull(merged)
     simplified, simplified_report = _simplify_aoi(convex)
     reoriented, reorientation_report = _counter_clockwise_reorientation(simplified)
     wrapped, unwrapped, clamp_report = _get_clamped_and_wrapped_geometry(reoriented)
-    
-    dimension_report = RepairEntry(
-        report_type="'type': 'EXTRA_DIMENSION'",
-        report="'report': Only 2-Dimensional area of interests are supported (lon/lat), higher dimension coordinates will be ignored"
-    ) if geometry.has_z else None
 
-    if convex_report != None:
+    dimension_report = (
+        RepairEntry(
+            report_type="'type': 'EXTRA_DIMENSION'",
+            report="'report': Only 2-dimensional areas of interest are supported (lon/lat), "
+            "higher dimension coordinates will be ignored",
+        )
+        if geometry.has_z
+        else None
+    )
+
+    if convex_report is not None:
         merge_report = None
 
-    repair_reports = [dimension_report, merge_report, convex_report, *clamp_report, *simplified_report, reorientation_report]
+    repair_reports = [
+        dimension_report,
+        merge_report,
+        convex_report,
+        *clamp_report,
+        *simplified_report,
+        reorientation_report,
+    ]
 
     for report in repair_reports:
         if report is not None:
             logging.info(f"{report}")
 
-    validated_wrapped = transform(lambda x, y, z=None: tuple([round(x, 14), round(y, 14)]), wrapped)
-    validated_unwrapped = transform(lambda x, y, z=None: tuple([round(x, 14), round(y, 14)]), unwrapped)
+    validated_wrapped = transform(
+        lambda x, y, z=None: tuple([round(x, 14), round(y, 14)]), wrapped
+    )
+    validated_unwrapped = transform(
+        lambda x, y, z=None: tuple([round(x, 14), round(y, 14)]), unwrapped
+    )
 
     return validated_wrapped, validated_unwrapped, repair_reports
 
 
 def _flatten_multipart_geometry(unflattened_geometry: BaseGeometry) -> BaseGeometry:
     """
-    Recursively flattens nested geometric collections, 
+    Recursively flattens nested geometric collections,
     guarantees geometric collections have a depth equal to 1. 
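+    (e.g. a GeometryCollection holding another GeometryCollection of a Point
+    and a Polygon flattens to a single GeometryCollection containing just the
+    Point and the Polygon)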
Also ignores any empty shapes in multipart geometry
     """
+
     def _recurse_nested_geometry(geometry: BaseGeometry) -> List[BaseGeometry]:
         output = []
 
@@ -108,16 +139,18 @@ def _recurse_nested_geometry(geometry: BaseGeometry) -> List[BaseGeometry]:
             return [geometry]
 
         return output
-    
+
     flattened = _recurse_nested_geometry(unflattened_geometry)
 
     return flattened[0] if len(flattened) == 1 else GeometryCollection(flattened)
 
 
-def _merge_overlapping_geometry(geometry: BaseGeometry) -> Tuple[BaseGeometry, RepairEntry]:
+def _merge_overlapping_geometry(
+    geometry: BaseGeometry,
+) -> Tuple[BaseGeometry, RepairEntry]:
     """
     parameter geometry: geometry to merge
-    Performs a unary union overlapping operation of the input geometry, 
+    Performs a unary union overlapping operation of the input geometry,
     ensuring geometric collections (multipolygon, multipartgeometry, etc)
     are simplified as much as possible before the convex-hull step
     output: merged-overlapping geometry
     """
     if isinstance(geometry, BaseMultipartGeometry):
         original_amount = len(geometry.geoms)
         merged = unary_union(geometry)
 
         # if there were non-overlapping shapes
         if isinstance(merged, BaseMultipartGeometry):
             unique_shapes = len(merged.geoms)
-            merged = orient(unary_union(GeometryCollection([geom.convex_hull for geom in merged.geoms])))
+            merged = orient(
+                unary_union(
+                    GeometryCollection([geom.convex_hull for geom in merged.geoms])
+                )
+            )
             if isinstance(merged, BaseMultipartGeometry):
                 if unique_shapes != len(merged.geoms):
-                    merge_report = RepairEntry("'type': 'OVERLAP_MERGE'", f"'report': {unique_shapes - len(merged.geoms)} non-overlapping shapes merged by their convex-hulls")
+                    merge_report = RepairEntry(
+                        "'type': 'OVERLAP_MERGE'",
+                        f"'report': {unique_shapes - len(merged.geoms)} "
+                        'non-overlapping shapes merged by their convex-hulls',
+                    )
                 else:
-                    merge_report = RepairEntry("'type': 'OVERLAP_MERGE'", f"'report': {unique_shapes} non-overlapping shapes merged by their convex-hulls")
+                    merge_report = RepairEntry(
+                        "'type': 'OVERLAP_MERGE'",
+                        f"'report': {unique_shapes} non-overlapping shapes merged by their convex-hulls",  # noqa: E501
+                    )
             else:
-                merge_report = RepairEntry("'type': 'OVERLAP_MERGE'", f"'report': Overlapping {original_amount} shapes merged into one")
+                merge_report = RepairEntry(
+                    "'type': 'OVERLAP_MERGE'",
+                    f"'report': Overlapping {original_amount} shapes merged into one",
+                )
 
         return merged, merge_report
 
@@ -150,13 +197,15 @@ def _merge_overlapping_geometry(geometry: BaseGeometry) -> Tuple[BaseGeometry, R
 
 def _counter_clockwise_reorientation(geometry: Union[Point, LineString, Polygon]):
     """
-    param geometry: Shapely geometry to re-orient 
-    Ensures the geometry coordinates are wound counter-clockwise 
-    output: counter-clockwise oriented geometry 
+    param geometry: Shapely geometry to re-orient
+    Ensures the geometry coordinates are wound counter-clockwise
+    output: counter-clockwise oriented geometry
     """
-    reoriented_report = RepairEntry("'type': 'REVERSE'", "'report': Reversed polygon winding order")
+    reoriented_report = RepairEntry(
+        "'type': 'REVERSE'", "'report': Reversed polygon winding order"
+    )
     reoriented = orient(geometry)
-    
+
     if isinstance(geometry, Polygon):
         # if the vertex ordering has changed
         if reoriented.exterior.is_ccw != geometry.exterior.is_ccw:
             return reoriented, reoriented_report
 
     return reoriented, None
 
 
-def _get_clamped_and_wrapped_geometry(shape: BaseGeometry) -> Tuple[BaseGeometry, BaseGeometry, List[RepairEntry]]:
+def _get_clamped_and_wrapped_geometry( 
+    shape: BaseGeometry,
+) -> Tuple[BaseGeometry, BaseGeometry, List[RepairEntry]]:
     """
-    param geometry: Shapely geometry to clamp 
+    param geometry: Shapely geometry to clamp
     Clamps geometry to +/-90 latitude and wraps longitude +/-180
     output: clamped shapely geometry
     """
     coords_clamped = 0
     coords_wrapped = 0
+
     def _clamp_lat(x, y, z=None):
         clamped = _clamp(y)
 
@@ -194,28 +246,32 @@ def _wrap_lon(x, y, z=None):
         return tuple([wrapped, y])
 
     def _unwrap_lon(x, y, z=None):
-        unwrapped = x if x >= 0 else x + 360 # This undoes wrapping
+        unwrapped = x if x >= 0 else x + 360  # This undoes wrapping
 
         return tuple([unwrapped, y])
 
-    
     clamped_lat = transform(_clamp_lat, shape)
-    
+
     wrapped = transform(_wrap_lon, clamped_lat)
-    
+
     if wrapped.bounds[2] - wrapped.bounds[0] > 180:
         unwrapped = transform(_unwrap_lon, wrapped)
     else:
         unwrapped = wrapped
-    
-    
+
     clampRepairReport = None
     wrapRepairReport = None
 
     if coords_clamped > 0:
-        clampRepairReport = RepairEntry("'type': 'CLAMP'", f"'report': 'Clamped {coords_clamped} value(s) to +/-90 latitude'")
+        clampRepairReport = RepairEntry(
+            "'type': 'CLAMP'",
+            f"'report': 'Clamped {coords_clamped} value(s) to +/-90 latitude'",
+        )
     if coords_wrapped > 0:
-        wrapRepairReport = RepairEntry("'type': 'WRAP'", f"'report': 'Wrapped {coords_wrapped} value(s) to +/-180 longitude'")
+        wrapRepairReport = RepairEntry(
+            "'type': 'WRAP'",
+            f"'report': 'Wrapped {coords_wrapped} value(s) to +/-180 longitude'",
+        )
 
     return (wrapped, unwrapped, [clampRepairReport, wrapRepairReport])
 
@@ -223,66 +279,95 @@ def _unwrap_lon(x, y, z=None):
 def _get_convex_hull(geometry: BaseGeometry) -> Tuple[BaseGeometry, RepairEntry]:
     """
     param geometry: geometry to perform possible convex hull operation on
-    If the given geometry is a collection of geometries, creates a convex-hull encompassing said geometry
-    output: convex hull of multi-part geometry, or the original single-shaped geometry
+    If the given geometry is a collection of geometries,
+    creates a convex-hull encompassing said geometry
+    output: convex hull of multi-part geometry, or the original single-shaped geometry
     """
-    if geometry.geom_type not in ['MultiPoint', 'MultiLineString', 'MultiPolygon', 'GeometryCollection']:
+    if geometry.geom_type not in [
+        "MultiPoint",
+        "MultiLineString",
+        "MultiPolygon",
+        "GeometryCollection",
+    ]:
         return geometry, None
-    
-    possible_repair = RepairEntry("'type': 'CONVEX_HULL_INDIVIDUAL'", "'report': 'Unconnected shapes: Convex-hulled each INDIVIDUAL shape to merge them together.'")
+
+    possible_repair = RepairEntry(
+        "'type': 'CONVEX_HULL_INDIVIDUAL'",
+        "'report': 'Unconnected shapes: Convex-hulled each INDIVIDUAL shape to merge them together.'",  # noqa: E501
+    )
     return geometry.convex_hull, possible_repair
 
 
-def _simplify_aoi(shape: Union[Polygon, LineString, Point],
-                  threshold: float = 0.004,
-                  max_depth: int = 10,
-                  ) -> Tuple[Union[Polygon, LineString, Point], List[RepairEntry]]:
+def _simplify_aoi(
+    shape: Union[Polygon, LineString, Point],
+    threshold: float = 0.004,
+    max_depth: int = 10,
+) -> Tuple[Union[Polygon, LineString, Point], List[RepairEntry]]:
     """
     param shape: Shapely geometry to simplify
     param threshold: point proximity threshold to merge nearby points of geometry with
     param max_depth: the maximum number of simplification passes to attempt, defaults to 10
-    Recursively simplifies geometry with increasing threshold, and 
+    Repeatedly simplifies geometry with an increasing threshold
     until there are no more than 300 points
     output: simplified geometry
     """
     repairs = []
 
-    if shape.geom_type == 'Point': 
+    if shape.geom_type == "Point":
         return shape, repairs
 
-    ### Check for very small shapes and collapse accordingly
+    # Check for very small shapes and collapse accordingly
     mbr_width = shape.bounds[2] - shape.bounds[0]
     mbr_height = shape.bounds[3] - shape.bounds[1]
 
     # If both pass, it's a tiny box. Turn it to a point
     if mbr_width <= threshold and mbr_height <= threshold:
         simplified = shape.centroid
-        repair = RepairEntry("'type': 'GEOMETRY_SIMPLIFICATION'",
-                             f"'report': 'Shape Collapsed to Point: shape of {_get_shape_coords_len(shape)} simplified to {_get_shape_coords_len(simplified)} with proximity threshold of {threshold}'")
+        repair = RepairEntry(
+            "'type': 'GEOMETRY_SIMPLIFICATION'",
+            "'report': 'Shape Collapsed to Point: "
+            f"shape of {_get_shape_coords_len(shape)} "
+            f"simplified to {_get_shape_coords_len(simplified)} "
+            f"with proximity threshold of {threshold}'",
+        )
         return simplified, [*repairs, repair]
 
     # If it's a single line segment, it's already as simple as can be. Don't do anything
-    elif shape.geom_type == 'LineString' and len(shape.coords) == 2:
+    elif shape.geom_type == "LineString" and len(shape.coords) == 2:
         return shape, repairs
 
     # Else, check if it's slim enough to become a linestring:
     elif mbr_width <= threshold:
         lon = (shape.bounds[2] - shape.bounds[0]) / 2 + shape.bounds[0]
         simplified = LineString([(lon, shape.bounds[1]), (lon, shape.bounds[3])])
-        repair = RepairEntry("'type': 'GEOMETRY_SIMPLIFICATION'",
-                             f"'report': 'Shape Collapsed to Vertical Line: shape of {_get_shape_coords_len(shape)} simplified to {_get_shape_coords_len(simplified)} with proximity threshold of {threshold}'")
+        repair = RepairEntry(
+            "'type': 'GEOMETRY_SIMPLIFICATION'",
+            f"'report': 'Shape Collapsed to Vertical Line: shape of {_get_shape_coords_len(shape)} "
+            f"simplified to {_get_shape_coords_len(simplified)} "
+            f"with proximity threshold of {threshold}'",
+        )
         return simplified, [*repairs, repair]
 
     elif mbr_height <= threshold:
         lat = (shape.bounds[3] - shape.bounds[1]) / 2 + shape.bounds[1]
         simplified = LineString([(shape.bounds[0], lat), (shape.bounds[2], lat)])
-        repair = RepairEntry("'type': 'GEOMETRY_SIMPLIFICATION'",
-                             f"'report': 'Shape Collapsed to Horizontal Line: shape of {_get_shape_coords_len(shape)} simplified to {_get_shape_coords_len(simplified)} with proximity threshold of {threshold}'")
+        repair = RepairEntry(
+            "'type': 'GEOMETRY_SIMPLIFICATION'",
+            "'report': 'Shape Collapsed to Horizontal Line: "
+            f"shape of {_get_shape_coords_len(shape)} simplified "
+            f"to {_get_shape_coords_len(simplified)} with proximity threshold of {threshold}'",
+        )
         return simplified, [*repairs, repair]
 
-    ### Keep taking away points until it's under 300:
+    # Keep taking away points until it's under 300:
     for simplify_level in range(0, max_depth):
-        simplifed = shape.simplify(tolerance=threshold*(1.5**simplify_level))
+        simplifed = shape.simplify(tolerance=threshold * (1.5**simplify_level))
 
         coords_length = _get_shape_coords_len(simplifed)
         if _get_shape_coords_len(shape) != coords_length:
-            repairs.append(RepairEntry("'type': 'GEOMETRY_SIMPLIFICATION'", f"'report': 'Shape Simplified: shape of {_get_shape_coords_len(shape)} simplified to {coords_length} with proximity threshold of {threshold}'"))
+            repairs.append(
+                RepairEntry(
+                    "'type': 'GEOMETRY_SIMPLIFICATION'",
+                    f"'report': 'Shape Simplified: shape of {_get_shape_coords_len(shape)} "
+                    f"simplified to {coords_length} with proximity threshold of {threshold}'",
+                )
+            )
 
         if coords_length <= 300:
             return simplifed, repairs
 
@@ -301,17 +386,17 @@ 
def _get_shape_coords_len(geometry: BaseGeometry): def _get_shape_coords(geometry: BaseGeometry): """Returns flattened coordinates of input Shapely geometry""" - if geometry.geom_type == 'Polygon': + if geometry.geom_type == "Polygon": return list(geometry.exterior.coords[:-1]) - - if geometry.geom_type == 'LineString': + + if geometry.geom_type == "LineString": return list(geometry.coords) - - if geometry.geom_type == 'Point': + + if geometry.geom_type == "Point": return list(geometry.coords) output = [] - + for geom in geometry.geoms: coords = _get_shape_coords(geom) output = [*output, *coords] diff --git a/asf_search/__init__.py b/asf_search/__init__.py index 4cc55396..4791997a 100644 --- a/asf_search/__init__.py +++ b/asf_search/__init__.py @@ -5,36 +5,48 @@ try: __version__ = version(__name__) except PackageNotFoundError as e: - msg = str('package is not installed!\n' - 'Install in editable/develop mode via (from the top of this repo):\n' - ' python3 -m pip install -e .\n' - 'Or, to just get the version number use:\n' - ' python setup.py --version') + msg = str( + "package is not installed!\n" + "Install in editable/develop mode via (from the top of this repo):\n" + " python3 -m pip install -e .\n" + "Or, to just get the version number use:\n" + " python setup.py --version" + ) print(msg) - ASF_LOGGER.exception(msg) - raise PackageNotFoundError("Install with 'python3 -m pip install -e .' to use") from e + ASF_LOGGER.exception(msg) # type: ignore # noqa: F821 + raise PackageNotFoundError( + "Install with 'python3 -m pip install -e .' to use" + ) from e ASF_LOGGER = logging.getLogger(__name__) # Add null handle so we do nothing by default. It's up to whatever # imports us, if they want logging. ASF_LOGGER.addHandler(logging.NullHandler()) -from .ASFSession import ASFSession -from .ASFProduct import ASFProduct -from .ASFStackableProduct import ASFStackableProduct -from .ASFSearchResults import ASFSearchResults -from .ASFSearchOptions import ASFSearchOptions, validators -from .Products import * -from .exceptions import * -from .constants import BEAMMODE, FLIGHT_DIRECTION, INSTRUMENT, PLATFORM, POLARIZATION, PRODUCT_TYPE, INTERNAL, DATASET -from .exceptions import * -from .health import * -from .search import * -from .download import * -from .CMR import * -from .baseline import * -from .WKT import validate_wkt -from .export import * +from .ASFSession import ASFSession # noqa: F401, E402 +from .ASFProduct import ASFProduct # noqa: F401 E402 +from .ASFStackableProduct import ASFStackableProduct # noqa: F401 E402 +from .ASFSearchResults import ASFSearchResults # noqa: F401 E402 +from .ASFSearchOptions import ASFSearchOptions, validators # noqa: F401 E402 +from .Products import * # noqa: F403 F401 E402 +from .exceptions import * # noqa: F403 F401 E402 +from .constants import ( # noqa: F401 E402 + BEAMMODE, # noqa: F401 E402 + FLIGHT_DIRECTION, # noqa: F401 E402 + INSTRUMENT, # noqa: F401 E402 + PLATFORM, # noqa: F401 E402 + POLARIZATION, # noqa: F401 E402 + PRODUCT_TYPE, # noqa: F401 E402 + INTERNAL, # noqa: F401 E402 + DATASET, # noqa: F401 E402 +) +from .health import * # noqa: F403 F401 E402 +from .search import * # noqa: F403 F401 E402 +from .download import * # noqa: F403 F401 E402 +from .CMR import * # noqa: F403 F401 E402 +from .baseline import * # noqa: F403 F401 E402 +from .WKT import validate_wkt # noqa: F401 E402 +from .export import * # noqa: F403 F401 E402 -REPORT_ERRORS=True +REPORT_ERRORS = True """Enables automatic search error reporting to ASF, send any questions to 
uso@asf.alaska.edu""" diff --git a/asf_search/baseline/__init__.py b/asf_search/baseline/__init__.py index 57ecb405..49ea5294 100644 --- a/asf_search/baseline/__init__.py +++ b/asf_search/baseline/__init__.py @@ -1,2 +1,2 @@ -from .calc import * -from .stack import * \ No newline at end of file +from .calc import * # noqa: F403 F401 +from .stack import * # noqa: F403 F401 diff --git a/asf_search/baseline/calc.py b/asf_search/baseline/calc.py index faa6442e..76fa3cc6 100644 --- a/asf_search/baseline/calc.py +++ b/asf_search/baseline/calc.py @@ -5,51 +5,60 @@ from ciso8601 import parse_datetime from asf_search import ASFProduct + # WGS84 constants a = 6378137 f = pow((1.0 - 1 / 298.257224), 2) # Technically f is normally considered to just be that 298... part but this is all we ever use, so # pre-calc and cache and call it all f anyhow + def calculate_perpendicular_baselines(reference: str, stack: List[ASFProduct]): for product in stack: baselineProperties = product.baseline - positionProperties = baselineProperties['stateVectors']['positions'] - + positionProperties = baselineProperties["stateVectors"]["positions"] + if len(positionProperties.keys()) == 0: - baselineProperties['noStateVectors'] = True + baselineProperties["noStateVectors"] = True continue - if None in [positionProperties['prePositionTime'], positionProperties['postPositionTime'], positionProperties['prePosition'], positionProperties['postPosition']]: - baselineProperties['noStateVectors'] = True + if None in [ + positionProperties["prePositionTime"], + positionProperties["postPositionTime"], + positionProperties["prePosition"], + positionProperties["postPosition"], + ]: + baselineProperties["noStateVectors"] = True continue - asc_node_time = parse_datetime(baselineProperties['ascendingNodeTime']).timestamp() + asc_node_time = parse_datetime( + baselineProperties["ascendingNodeTime"] + ).timestamp() - start = parse_datetime(product.properties['startTime']).timestamp() - end = parse_datetime(product.properties['stopTime']).timestamp() + start = parse_datetime(product.properties["startTime"]).timestamp() + end = parse_datetime(product.properties["stopTime"]).timestamp() center = start + ((end - start) / 2) - baselineProperties['relative_start_time'] = start - asc_node_time - baselineProperties['relative_center_time'] = center - asc_node_time - baselineProperties['relative_end_time'] = end - asc_node_time + baselineProperties["relative_start_time"] = start - asc_node_time + baselineProperties["relative_center_time"] = center - asc_node_time + baselineProperties["relative_end_time"] = end - asc_node_time - t_pre = parse_datetime(positionProperties['prePositionTime']).timestamp() - t_post = parse_datetime(positionProperties['postPositionTime']).timestamp() - product.baseline['relative_sv_pre_time'] = t_pre - asc_node_time - product.baseline['relative_sv_post_time'] = t_post - asc_node_time + t_pre = parse_datetime(positionProperties["prePositionTime"]).timestamp() + t_post = parse_datetime(positionProperties["postPositionTime"]).timestamp() + product.baseline["relative_sv_pre_time"] = t_pre - asc_node_time + product.baseline["relative_sv_post_time"] = t_post - asc_node_time for product in stack: - # product.properties['granulePosition'] = get_granule_position(reference.properties['centerLat'], reference.properties['centerLon']) - - if product.properties['sceneName'] == reference: + if product.properties["sceneName"] == reference: reference = product - reference.properties['perpendicularBaseline'] = 0 + 
reference.properties["perpendicularBaseline"] = 0 # Cache these values - reference.baseline['granulePosition'] = get_granule_position(reference.properties['centerLat'], reference.properties['centerLon']) + reference.baseline["granulePosition"] = get_granule_position( + reference.properties["centerLat"], reference.properties["centerLon"] + ) break for secondary in stack: - if secondary.baseline.get('noStateVectors'): - secondary.properties['perpendicularBaseline'] = None + if secondary.baseline.get("noStateVectors"): + secondary.properties["perpendicularBaseline"] = None continue shared_rel_time = get_shared_sv_time(reference, secondary) @@ -57,118 +66,155 @@ def calculate_perpendicular_baselines(reference: str, stack: List[ASFProduct]): reference_shared_pos = get_pos_at_rel_time(reference, shared_rel_time) reference_shared_vel = get_vel_at_rel_time(reference, shared_rel_time) secondary_shared_pos = get_pos_at_rel_time(secondary, shared_rel_time) - #secondary_shared_vel = get_vel_at_rel_time(secondary, shared_rel_time) # unused + # secondary_shared_vel = get_vel_at_rel_time(secondary, shared_rel_time) # unused # need to get sat pos and sat vel at center time - reference.baseline['alongBeamVector'] = get_along_beam_vector(reference_shared_pos, reference.baseline['granulePosition']) - reference.baseline['upBeamVector'] = get_up_beam_vector(reference_shared_vel, reference.baseline['alongBeamVector']) + reference.baseline["alongBeamVector"] = get_along_beam_vector( + reference_shared_pos, reference.baseline["granulePosition"] + ) + reference.baseline["upBeamVector"] = get_up_beam_vector( + reference_shared_vel, reference.baseline["alongBeamVector"] + ) perpendicular_baseline = get_paired_granule_baseline( - reference.baseline['granulePosition'], - reference.baseline['upBeamVector'], - secondary_shared_pos) + reference.baseline["granulePosition"], + reference.baseline["upBeamVector"], + secondary_shared_pos, + ) if abs(perpendicular_baseline) > 100000: perpendicular_baseline = None - secondary.properties['perpendicularBaseline'] = perpendicular_baseline + secondary.properties["perpendicularBaseline"] = perpendicular_baseline return stack + # Convert granule center lat/lon to fixed earth coordinates in meters using WGS84 ellipsoid. 
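+# For example, at lat=0, lon=0 the terms below reduce to coslat=1, sinlat=0,
+# C=1, S=f and aC=a, so get_granule_position(0, 0) comes out to
+# [6378137, 0, 0]: a point on the equator sits one equatorial radius along
+# the x-axis.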
def get_granule_position(scene_center_lat, scene_center_lon): lat = radians(float(scene_center_lat)) lon = radians(float(scene_center_lon)) - coslat = cos(lat) # This value gets used a couple times, cache it - sinlat = sin(lat) # This value gets used a couple times, cache it + coslat = cos(lat) # This value gets used a couple times, cache it + sinlat = sin(lat) # This value gets used a couple times, cache it C = 1.0 / (sqrt(pow(coslat, 2) + f * pow(sinlat, 2))) S = f * C aC = a * C - granule_position = np.array([aC * coslat * cos(lon), aC * coslat * sin(lon), a * S * sinlat]) - return(granule_position) + granule_position = np.array( + [aC * coslat * cos(lon), aC * coslat * sin(lon), a * S * sinlat] + ) + return granule_position + # Calculate along beam vector from sat pos and granule pos def get_along_beam_vector(satellite_position, granule_position): along_beam_vector = np.subtract(satellite_position, granule_position) - along_beam_vector = np.divide(along_beam_vector, np.linalg.norm(along_beam_vector)) # normalize - return(along_beam_vector) + along_beam_vector = np.divide( + along_beam_vector, np.linalg.norm(along_beam_vector) + ) # normalize + return along_beam_vector + # Calculate up beam vector from sat velocity and along beam vector def get_up_beam_vector(satellite_velocity, along_beam_vector): up_beam_vector = np.cross(satellite_velocity, along_beam_vector) - up_beam_vector = np.divide(up_beam_vector, np.linalg.norm(up_beam_vector)) # normalize - return(up_beam_vector) + up_beam_vector = np.divide( + up_beam_vector, np.linalg.norm(up_beam_vector) + ) # normalize + return up_beam_vector + # Calculate baseline between reference and paired granule -def get_paired_granule_baseline(reference_granule_position, reference_up_beam_vector, paired_satellite_position): +def get_paired_granule_baseline( + reference_granule_position, reference_up_beam_vector, paired_satellite_position +): posd = np.subtract(paired_satellite_position, reference_granule_position) baseline = np.dot(reference_up_beam_vector, posd) - return(int(round(baseline))) + return int(round(baseline)) + # Find a relative orbit time covered by both granules' SVs def get_shared_sv_time(reference, secondary): - start = max(reference.baseline['relative_sv_pre_time'], secondary.baseline['relative_sv_pre_time']) - end = max(reference.baseline['relative_sv_post_time'], secondary.baseline['relative_sv_post_time']) - - # Favor the start/end SV time of the reference so we can use that SV directly without interpolation - if start == reference.baseline['relative_sv_pre_time']: + start = max( + reference.baseline["relative_sv_pre_time"], + secondary.baseline["relative_sv_pre_time"], + ) + end = max( + reference.baseline["relative_sv_post_time"], + secondary.baseline["relative_sv_post_time"], + ) + + # Favor the start/end SV time of the reference so + # we can use that SV directly without interpolation + if start == reference.baseline["relative_sv_pre_time"]: return start - if end == reference.baseline['relative_sv_post_time']: + if end == reference.baseline["relative_sv_post_time"]: return end return start + # Interpolate a position SV based on relative time def get_pos_at_rel_time(granule: ASFProduct, relative_time): - if relative_time == granule.baseline['relative_sv_pre_time']: - return granule.baseline['stateVectors']['positions']['prePosition'] - if relative_time == granule.baseline['relative_sv_post_time']: - return granule.baseline['stateVectors']['positions']['postPosition'] + if relative_time == 
granule.baseline["relative_sv_pre_time"]: + return granule.baseline["stateVectors"]["positions"]["prePosition"] + if relative_time == granule.baseline["relative_sv_post_time"]: + return granule.baseline["stateVectors"]["positions"]["postPosition"] - duration = granule.baseline['relative_sv_post_time'] - granule.baseline['relative_sv_pre_time'] - factor = (relative_time - granule.baseline['relative_sv_pre_time']) / duration + duration = ( + granule.baseline["relative_sv_post_time"] - granule.baseline["relative_sv_pre_time"] + ) + factor = (relative_time - granule.baseline["relative_sv_pre_time"]) / duration - vec_a = granule.baseline['stateVectors']['positions']['prePosition'] - vec_b = granule.baseline['stateVectors']['positions']['postPosition'] + vec_a = granule.baseline["stateVectors"]["positions"]["prePosition"] + vec_b = granule.baseline["stateVectors"]["positions"]["postPosition"] v = [ interpolate(vec_a[0], vec_b[0], factor), interpolate(vec_a[1], vec_b[1], factor), - interpolate(vec_a[2], vec_b[2], factor)] + interpolate(vec_a[2], vec_b[2], factor), + ] return radius_fix(granule, v, relative_time) + # Interpolate a velocity SV based on relative time def get_vel_at_rel_time(granule: ASFProduct, relative_time): - velocityProperties = granule.baseline['stateVectors']['velocities'] - if relative_time == granule.baseline['relative_sv_pre_time']: - return velocityProperties['preVelocity'] - if relative_time == granule.baseline['relative_sv_post_time']: - return velocityProperties['postVelocity'] + velocityProperties = granule.baseline["stateVectors"]["velocities"] + if relative_time == granule.baseline["relative_sv_pre_time"]: + return velocityProperties["preVelocity"] + if relative_time == granule.baseline["relative_sv_post_time"]: + return velocityProperties["postVelocity"] - duration = granule.baseline['relative_sv_post_time'] - granule.baseline['relative_sv_pre_time'] - factor = (relative_time - granule.baseline['relative_sv_pre_time']) / duration + duration = ( + granule.baseline["relative_sv_post_time"] - granule.baseline["relative_sv_pre_time"] + ) + factor = (relative_time - granule.baseline["relative_sv_pre_time"]) / duration - vec_a = velocityProperties['preVelocity'] - vec_b = velocityProperties['postVelocity'] + vec_a = velocityProperties["preVelocity"] + vec_b = velocityProperties["postVelocity"] v = [ interpolate(vec_a[0], vec_b[0], factor), interpolate(vec_a[1], vec_b[1], factor), - interpolate(vec_a[2], vec_b[2], factor)] + interpolate(vec_a[2], vec_b[2], factor), + ] return v + # convenience 1d linear interp def interpolate(p0, p1, x): return (p0 * (1.0 - x)) + (p1 * x) + # Bump the provided sat pos out to a radius interpolated between the start and end sat pos vectors def radius_fix(granule: ASFProduct, sat_pos, relative_time): - positionProperties = granule.baseline['stateVectors']['positions'] - pre_l = np.linalg.norm(positionProperties['prePosition']) - post_l = np.linalg.norm(positionProperties['postPosition']) + positionProperties = granule.baseline["stateVectors"]["positions"] + pre_l = np.linalg.norm(positionProperties["prePosition"]) + post_l = np.linalg.norm(positionProperties["postPosition"]) sat_pos_l = np.linalg.norm(sat_pos) - dt = relative_time - granule.baseline['relative_sv_pre_time'] - new_l = pre_l + (post_l - pre_l) * dt / (granule.baseline['relative_sv_post_time'] - granule.baseline['relative_sv_pre_time']) + dt = relative_time - granule.baseline["relative_sv_pre_time"] + new_l = pre_l + (post_l - pre_l) * dt / ( + 
granule.baseline["relative_sv_post_time"] - granule.baseline["relative_sv_pre_time"] + ) sat_pos[0] = sat_pos[0] * new_l / sat_pos_l sat_pos[1] = sat_pos[1] * new_l / sat_pos_l sat_pos[2] = sat_pos[2] * new_l / sat_pos_l diff --git a/asf_search/baseline/stack.py b/asf_search/baseline/stack.py index c443adae..12606932 100644 --- a/asf_search/baseline/stack.py +++ b/asf_search/baseline/stack.py @@ -1,4 +1,4 @@ -from typing import Tuple, List +from typing import Tuple, List, Union from ciso8601 import parse_datetime import pytz @@ -6,36 +6,52 @@ from asf_search import ASFProduct, ASFStackableProduct, ASFSearchResults -def get_baseline_from_stack(reference: ASFProduct, stack: ASFSearchResults) -> Tuple[ASFSearchResults, List[dict]]: +def get_baseline_from_stack( + reference: ASFProduct, stack: ASFSearchResults +) -> Tuple[ASFSearchResults, List[dict]]: warnings = [] if len(stack) == 0: - raise ValueError('No products found matching stack parameters') - - stack = [product for product in stack if not product.properties['processingLevel'].lower().startswith('metadata') and product.baseline is not None] + raise ValueError("No products found matching stack parameters") + + stack = [ + product + for product in stack + if not product.properties["processingLevel"].lower().startswith("metadata") and + product.baseline is not None + ] reference, stack, reference_warnings = check_reference(reference, stack) - + if reference_warnings is not None: warnings.append(reference_warnings) - stack = calculate_temporal_baselines(reference, stack) if reference.baseline_type == ASFStackableProduct.BaselineCalcType.PRE_CALCULATED: stack = offset_perpendicular_baselines(reference, stack) else: - stack = calculate_perpendicular_baselines(reference.properties['sceneName'], stack) + stack = calculate_perpendicular_baselines( + reference.properties["sceneName"], stack + ) missing_state_vectors = _count_missing_state_vectors(stack) if missing_state_vectors > 0: - warnings.append({'MISSING STATE VECTORS': f'{missing_state_vectors} scenes in stack missing State Vectors, perpendicular baseline not calculated for these scenes'}) - + warnings.append( + { + "MISSING STATE VECTORS": + f'{missing_state_vectors} scenes in stack missing State Vectors, ' + 'perpendicular baseline not calculated for these scenes' + } + ) + return ASFSearchResults(stack), warnings - + + def _count_missing_state_vectors(stack) -> int: - return len([scene for scene in stack if scene.baseline.get('noStateVectors')]) + return len([scene for scene in stack if scene.baseline.get("noStateVectors")]) -def find_new_reference(stack: ASFSearchResults): + +def find_new_reference(stack: ASFSearchResults) -> Union[ASFProduct, None]: for product in stack: if product.is_valid_reference(): return product @@ -44,44 +60,59 @@ def find_new_reference(stack: ASFSearchResults): def check_reference(reference: ASFProduct, stack: ASFSearchResults): warnings = None - if reference.properties['sceneName'] not in [product.properties['sceneName'] for product in stack]: # Somehow the reference we built the stack from is missing?! Just pick one + if reference.properties["sceneName"] not in [ + product.properties["sceneName"] for product in stack + ]: # Somehow the reference we built the stack from is missing?! Just pick one reference = stack[0] - warnings = [{'NEW_REFERENCE': 'A new reference scene had to be selected in order to calculate baseline values.'}] + warnings = [ + { + 'NEW_REFERENCE': + 'A new reference scene had to be selected in order to calculate baseline values.' 
+            }
+        ]
 
     # non-s1 is_valid_reference raises an error, while we try to find a valid s1 reference
     # do we want this behaviour for pre-calc stacks?
     if not reference.is_valid_reference():
         reference = find_new_reference(stack)
-        if reference == None:
-            raise ValueError('No valid state vectors on any scenes in stack, this is fatal')
+        if reference is None:
+            raise ValueError(
+                "No valid state vectors on any scenes in stack, this is fatal"
+            )
 
     return reference, stack, warnings
 
 
 def calculate_temporal_baselines(reference: ASFProduct, stack: ASFSearchResults):
     """
-    Calculates temporal baselines for a stack of products based on a reference scene and injects those values into the stack.
+    Calculates temporal baselines for a stack of products based on a reference scene
+    and injects those values into the stack.
 
     :param reference: The reference product from which to calculate temporal baselines.
     :param stack: The stack to operate on.
     :return: the same stack, with 'temporalBaseline' values injected in-place.
     """
-    reference_time = parse_datetime(reference.properties['startTime'])
+    reference_time = parse_datetime(reference.properties["startTime"])
     if reference_time.tzinfo is None:
         reference_time = pytz.utc.localize(reference_time)
 
     for secondary in stack:
-        secondary_time = parse_datetime(secondary.properties['startTime'])
+        secondary_time = parse_datetime(secondary.properties["startTime"])
         if secondary_time.tzinfo is None:
             secondary_time = pytz.utc.localize(secondary_time)
-        secondary.properties['temporalBaseline'] = (secondary_time.date() - reference_time.date()).days
+        secondary.properties["temporalBaseline"] = (
+            secondary_time.date() - reference_time.date()
+        ).days
 
     return stack
 
+
 def offset_perpendicular_baselines(reference: ASFProduct, stack: ASFSearchResults):
-    reference_offset = float(reference.baseline['insarBaseline'])
-    
+    reference_offset = float(reference.baseline["insarBaseline"])
+
     for product in stack:
-        product.properties['perpendicularBaseline'] = round(float(product.baseline['insarBaseline']) - reference_offset)
+        product.properties["perpendicularBaseline"] = round(
+            float(product.baseline["insarBaseline"]) - reference_offset
+        )
 
     return stack
diff --git a/asf_search/constants/BEAMMODE.py b/asf_search/constants/BEAMMODE.py
index 203efce9..0201200c 100644
--- a/asf_search/constants/BEAMMODE.py
+++ b/asf_search/constants/BEAMMODE.py
@@ -1,47 +1,47 @@
-IW = 'IW'
-EW = 'EW'
-S1 = 'S1'
-S2 = 'S2'
-S3 = 'S3'
-S4 = 'S4'
-S5 = 'S5'
-S6 = 'S6'
-WV = 'WV'
-DSN = 'DSN'
-FBS = 'FBS'
-FBD = 'FBD'
-PLR = 'PLR'
-WB1 = 'WB1'
-WB2 = 'WB2'
-OBS = 'OBS'
-SIRC11 = '11'
-SIRC13 = '13'
-SIRC16 = '16'
-SIRC20 = '20'
-SLC = 'SLC'
-STD = 'STD'
-POL = 'POL'
-RPI = 'RPI'
-EH3 = 'EH3'
-EH4 = 'EH4'
-EH6 = 'EH6'
-EL1 = 'EL1'
-FN1 = 'FN1'
-FN2 = 'FN2'
-FN3 = 'FN3'
-FN4 = 'FN4'
-FN5 = 'FN5'
-SNA = 'SNA'
-SNB = 'SNB'
-ST1 = 'ST1'
-ST2 = 'ST2'
-ST3 = 'ST3'
-ST4 = 'ST4'
-ST5 = 'ST5'
-ST6 = 'ST6'
-ST7 = 'ST7'
-SWA = 'SWA'
-SWB = 'SWB'
-WD1 = 'WD1'
-WD2 = 'WD2'
-WD3 = 'WD3'
+IW = "IW"
+EW = "EW"
+S1 = "S1"
+S2 = "S2"
+S3 = "S3"
+S4 = "S4"
+S5 = "S5"
+S6 = "S6"
+WV = "WV"
+DSN = "DSN"
+FBS = "FBS"
+FBD = "FBD"
+PLR = "PLR"
+WB1 = "WB1"
+WB2 = "WB2"
+OBS = "OBS"
+SIRC11 = "11"
+SIRC13 = "13"
+SIRC16 = "16"
+SIRC20 = "20"
+SLC = "SLC"
+STD = "STD"
+POL = "POL"
+RPI = "RPI"
+EH3 = "EH3"
+EH4 = "EH4"
+EH6 = "EH6"
+EL1 = "EL1"
+FN1 = "FN1"
+FN2 = "FN2"
+FN3 = "FN3"
+FN4 = "FN4"
+FN5 = "FN5"
+SNA = "SNA"
+SNB = "SNB"
+ST1 = "ST1"
+ST2 = "ST2"
+ST3 = "ST3"
+ST4 = "ST4"
+ST5 = "ST5"
+ST6 = "ST6"
+ST7 = "ST7"
+SWA = "SWA"
+SWB = "SWB"
+WD1 
= "WD1" +WD2 = "WD2" +WD3 = "WD3" diff --git a/asf_search/constants/DATASET.py b/asf_search/constants/DATASET.py index fb705b95..2b894b1d 100644 --- a/asf_search/constants/DATASET.py +++ b/asf_search/constants/DATASET.py @@ -1,16 +1,16 @@ -SENTINEL1 = 'SENTINEL-1' -OPERA_S1 = 'OPERA-S1' -OPERA_S1_CALVAL = 'OPERA-S1-CALVAL' -SLC_BURST = 'SLC-BURST' -ALOS_PALSAR = 'ALOS PALSAR' -ALOS_AVNIR_2 = 'ALOS AVNIR-2' -SIRC = 'SIR-C' -ARIA_S1_GUNW = 'ARIA S1 GUNW' -SMAP = 'SMAP' -UAVSAR = 'UAVSAR' -RADARSAT_1 = 'RADARSAT-1' -ERS = 'ERS' -JERS_1 = 'JERS-1' -AIRSAR = 'AIRSAR' -SEASAT = 'SEASAT' -NISAR = 'NISAR' +SENTINEL1 = "SENTINEL-1" +OPERA_S1 = "OPERA-S1" +OPERA_S1_CALVAL = "OPERA-S1-CALVAL" +SLC_BURST = "SLC-BURST" +ALOS_PALSAR = "ALOS PALSAR" +ALOS_AVNIR_2 = "ALOS AVNIR-2" +SIRC = "SIR-C" +ARIA_S1_GUNW = "ARIA S1 GUNW" +SMAP = "SMAP" +UAVSAR = "UAVSAR" +RADARSAT_1 = "RADARSAT-1" +ERS = "ERS" +JERS_1 = "JERS-1" +AIRSAR = "AIRSAR" +SEASAT = "SEASAT" +NISAR = "NISAR" diff --git a/asf_search/constants/FLIGHT_DIRECTION.py b/asf_search/constants/FLIGHT_DIRECTION.py index c4e942e0..a1ac154b 100644 --- a/asf_search/constants/FLIGHT_DIRECTION.py +++ b/asf_search/constants/FLIGHT_DIRECTION.py @@ -1,2 +1,2 @@ -ASCENDING = 'ASCENDING' -DESCENDING = 'DESCENDING' +ASCENDING = "ASCENDING" +DESCENDING = "DESCENDING" diff --git a/asf_search/constants/INSTRUMENT.py b/asf_search/constants/INSTRUMENT.py index efd19451..437d9a41 100644 --- a/asf_search/constants/INSTRUMENT.py +++ b/asf_search/constants/INSTRUMENT.py @@ -1,3 +1,3 @@ -C_SAR = 'C-SAR' -PALSAR = 'PALSAR' -AVNIR_2 = 'AVNIR-2' +C_SAR = "C-SAR" +PALSAR = "PALSAR" +AVNIR_2 = "AVNIR-2" diff --git a/asf_search/constants/INTERNAL.py b/asf_search/constants/INTERNAL.py index f92b1f3e..844dbc7c 100644 --- a/asf_search/constants/INTERNAL.py +++ b/asf_search/constants/INTERNAL.py @@ -4,9 +4,9 @@ CMR_TIMEOUT = 30 CMR_FORMAT_EXT = 'umm_json' CMR_GRANULE_PATH = f'/search/granules.{CMR_FORMAT_EXT}' -CMR_COLLECTIONS = f'/search/collections' +CMR_COLLECTIONS = '/search/collections' CMR_COLLECTIONS_PATH = f'{CMR_COLLECTIONS}.{CMR_FORMAT_EXT}' -CMR_HEALTH_PATH = f'/search/health' +CMR_HEALTH_PATH = '/search/health' CMR_PAGE_SIZE = 250 EDL_HOST = 'urs.earthdata.nasa.gov' EDL_CLIENT_ID = 'BO_n7nTIlMljdvU6kRRB3g' diff --git a/asf_search/constants/PLATFORM.py b/asf_search/constants/PLATFORM.py index fab0e644..01a40fb5 100644 --- a/asf_search/constants/PLATFORM.py +++ b/asf_search/constants/PLATFORM.py @@ -1,15 +1,15 @@ -SENTINEL1 = 'SENTINEL-1' -SENTINEL1A = 'Sentinel-1A' -SENTINEL1B = 'Sentinel-1B' -SIRC = 'SIR-C' -ALOS = 'ALOS' -ERS = 'ERS' -ERS1 = 'ERS-1' -ERS2 = 'ERS-2' -JERS = 'JERS-1' -RADARSAT = 'RADARSAT-1' -AIRSAR = 'AIRSAR' -SEASAT = 'SEASAT 1' -SMAP = 'SMAP' -UAVSAR = 'UAVSAR' -NISAR = 'NISAR' +SENTINEL1 = "SENTINEL-1" +SENTINEL1A = "Sentinel-1A" +SENTINEL1B = "Sentinel-1B" +SIRC = "SIR-C" +ALOS = "ALOS" +ERS = "ERS" +ERS1 = "ERS-1" +ERS2 = "ERS-2" +JERS = "JERS-1" +RADARSAT = "RADARSAT-1" +AIRSAR = "AIRSAR" +SEASAT = "SEASAT 1" +SMAP = "SMAP" +UAVSAR = "UAVSAR" +NISAR = "NISAR" diff --git a/asf_search/constants/POLARIZATION.py b/asf_search/constants/POLARIZATION.py index 686ea32a..d38e3c9f 100644 --- a/asf_search/constants/POLARIZATION.py +++ b/asf_search/constants/POLARIZATION.py @@ -1,16 +1,16 @@ -HH = 'HH' -VV = 'VV' -VV_VH = 'VV+VH' -HH_HV = 'HH+HV' -DUAL_HH = 'DUAL HH' -DUAL_VV = 'DUAL VV' -DUAL_HV = 'DUAL HV' -DUAL_VH = 'DUAL VH' -HH_3SCAN = 'HH 3SCAN' -HH_4SCAN = 'HH 4SCAN' -HH_5SCAN = 'HH 5SCAN' -QUAD = 'quadrature' -HH_VV = 'HH+VV' -HH_HV_VH_VV = 
'HH+HV+VH+VV' -FULL = 'full' -UNKNOWN = 'UNKNOWN' \ No newline at end of file +HH = "HH" +VV = "VV" +VV_VH = "VV+VH" +HH_HV = "HH+HV" +DUAL_HH = "DUAL HH" +DUAL_VV = "DUAL VV" +DUAL_HV = "DUAL HV" +DUAL_VH = "DUAL VH" +HH_3SCAN = "HH 3SCAN" +HH_4SCAN = "HH 4SCAN" +HH_5SCAN = "HH 5SCAN" +QUAD = "quadrature" +HH_VV = "HH+VV" +HH_HV_VH_VV = "HH+HV+VH+VV" +FULL = "full" +UNKNOWN = "UNKNOWN" diff --git a/asf_search/constants/PRODUCT_TYPE.py b/asf_search/constants/PRODUCT_TYPE.py index b6156543..949eb113 100644 --- a/asf_search/constants/PRODUCT_TYPE.py +++ b/asf_search/constants/PRODUCT_TYPE.py @@ -1,29 +1,29 @@ # Sentinel-1 -GRD_HD = 'GRD_HD' -GRD_MD = 'GRD_MD' -GRD_MS = 'GRD_MS' -GRD_HS = 'GRD_HS' -GRD_FD = 'GRD_FD' -SLC = 'SLC' -OCN = 'OCN' -RAW = 'RAW' -METADATA_GRD_HD = 'METADATA_GRD_HD' -METADATA_GRD_MD = 'METADATA_GRD_MD' -METADATA_GRD_MS = 'METADATA_GRD_MS' -METADATA_GRD_HS = 'METADATA_GRD_HS' -METADATA_SLC = 'METADATA_SLC' -METADATA_OCN = 'METADATA_OCN' -METADATA_RAW = 'METADATA_RAW' -BURST = 'BURST' +GRD_HD = "GRD_HD" +GRD_MD = "GRD_MD" +GRD_MS = "GRD_MS" +GRD_HS = "GRD_HS" +GRD_FD = "GRD_FD" +SLC = "SLC" +OCN = "OCN" +RAW = "RAW" +METADATA_GRD_HD = "METADATA_GRD_HD" +METADATA_GRD_MD = "METADATA_GRD_MD" +METADATA_GRD_MS = "METADATA_GRD_MS" +METADATA_GRD_HS = "METADATA_GRD_HS" +METADATA_SLC = "METADATA_SLC" +METADATA_OCN = "METADATA_OCN" +METADATA_RAW = "METADATA_RAW" +BURST = "BURST" # ALOS PALSAR -L1_0 = 'L1.0' -L1_1 = 'L1.1' -L1_5 = 'L1.5' -L2_2 = 'L2.2' -RTC_LOW_RES = 'RTC_LOW_RES' -RTC_HIGH_RES = 'RTC_HI_RES' -KMZ = 'KMZ' +L1_0 = "L1.0" +L1_1 = "L1.1" +L1_5 = "L1.5" +L2_2 = "L2.2" +RTC_LOW_RES = "RTC_LOW_RES" +RTC_HIGH_RES = "RTC_HI_RES" +KMZ = "KMZ" # ALOS AVNIR # No PROCESSING_TYPE attribute in CMR @@ -32,47 +32,47 @@ # SLC and SLC metadata are both 'SLC', provided by Sentinel-1 constants # Sentinel-1 InSAR -GUNW_STD = 'GUNW_STD' -GUNW_AMP = 'GUNW_AMP' -GUNW_CON = 'GUNW_CON' -GUN_COH = 'GUNW_COH' -GUNW_UNW = 'GUNW_UNW' +GUNW_STD = "GUNW_STD" +GUNW_AMP = "GUNW_AMP" +GUNW_CON = "GUNW_CON" +GUN_COH = "GUNW_COH" +GUNW_UNW = "GUNW_UNW" # SMAP -L1A_RADAR_RO_HDF5 = 'L1A_Radar_RO_HDF5' -L1A_RADAR_HDF5 = 'L1A_Radar_HDF5' -L1B_S0_LOW_RES_HDF5 = 'L1B_S0_LoRes_HDF5' -L1C_S0_HIGH_RES_HDF5 = 'L1C_S0_HiRes_HDF5' -L1A_RADAR_RO_QA = 'L1A_Radar_RO_QA' -L1A_RADAR_QA = 'L1A_Radar_QA' -L1B_S0_LOW_RES_QA = 'L1B_S0_LoRes_QA' -L1C_S0_HIGH_RES_QA = 'L1C_S0_HiRes_QA' -L1A_RADAR_RO_ISO_XML = 'L1A_Radar_RO_ISO_XML' -L1B_S0_LOW_RES_ISO_XML = 'L1B_S0_LoRes_ISO_XML' -L1C_S0_HIGH_RES_ISO_XML = 'L1C_S0_HiRes_ISO_XML' +L1A_RADAR_RO_HDF5 = "L1A_Radar_RO_HDF5" +L1A_RADAR_HDF5 = "L1A_Radar_HDF5" +L1B_S0_LOW_RES_HDF5 = "L1B_S0_LoRes_HDF5" +L1C_S0_HIGH_RES_HDF5 = "L1C_S0_HiRes_HDF5" +L1A_RADAR_RO_QA = "L1A_Radar_RO_QA" +L1A_RADAR_QA = "L1A_Radar_QA" +L1B_S0_LOW_RES_QA = "L1B_S0_LoRes_QA" +L1C_S0_HIGH_RES_QA = "L1C_S0_HiRes_QA" +L1A_RADAR_RO_ISO_XML = "L1A_Radar_RO_ISO_XML" +L1B_S0_LOW_RES_ISO_XML = "L1B_S0_LoRes_ISO_XML" +L1C_S0_HIGH_RES_ISO_XML = "L1C_S0_HiRes_ISO_XML" # UAVSAR -AMPLITUDE = 'AMPLITUDE' -STOKES = 'STOKES' -AMPLITUDE_GRD = 'AMPLITUDE_GRD' -PROJECTED = 'PROJECTED' -PROJECTED_ML5X5 = 'PROJECTED_ML5X5' -PROJECTED_ML3X3 = 'PROJECTED_ML3X3' -INTERFEROMETRY_GRD = 'INTERFEROMETRY_GRD' -INTERFEROMETRY = 'INTERFEROMETRY' -COMPLEX = 'COMPLEX' +AMPLITUDE = "AMPLITUDE" +STOKES = "STOKES" +AMPLITUDE_GRD = "AMPLITUDE_GRD" +PROJECTED = "PROJECTED" +PROJECTED_ML5X5 = "PROJECTED_ML5X5" +PROJECTED_ML3X3 = "PROJECTED_ML3X3" +INTERFEROMETRY_GRD = "INTERFEROMETRY_GRD" +INTERFEROMETRY = "INTERFEROMETRY" +COMPLEX = 
"COMPLEX" # KMZ provided by ALOS PALSAR -INC = 'INC' -SLOPE = 'SLOPE' -DEM_TIFF = 'DEM_TIFF' -PAULI = 'PAULI' -METADATA = 'METADATA' +INC = "INC" +SLOPE = "SLOPE" +DEM_TIFF = "DEM_TIFF" +PAULI = "PAULI" +METADATA = "METADATA" # RADARSAT -L0 = 'L0' -L1 = 'L1' +L0 = "L0" +L1 = "L1" -#ERS +# ERS # L0 provided by RADARSAT # L1 provided by RADARSAT @@ -81,22 +81,22 @@ # L1 provided by RADARSAT # AIRSAR -CTIF = 'CTIF' -PTIF = 'PTIF' -LTIF = 'LTIF' -JPG = 'JPG' -LSTOKES = 'LSTOKES' -PSTOKES = 'PSTOKES' -CSTOKES = 'CSTOKES' -DEM = 'DEM' -THREEFP = '3FP' +CTIF = "CTIF" +PTIF = "PTIF" +LTIF = "LTIF" +JPG = "JPG" +LSTOKES = "LSTOKES" +PSTOKES = "PSTOKES" +CSTOKES = "CSTOKES" +DEM = "DEM" +THREEFP = "3FP" # SEASAT -GEOTIFF = 'GEOTIFF' +GEOTIFF = "GEOTIFF" # L1 provided by RADARSAT # OPERA-S1 -RTC = 'RTC' -CSLC = 'CSLC' -RTC_STATIC = 'RTC-STATIC' -CSLC_STATIC = 'CSLC-STATIC' \ No newline at end of file +RTC = "RTC" +CSLC = "CSLC" +RTC_STATIC = "RTC-STATIC" +CSLC_STATIC = "CSLC-STATIC" diff --git a/asf_search/constants/__init__.py b/asf_search/constants/__init__.py index 3ab520e1..a190d426 100644 --- a/asf_search/constants/__init__.py +++ b/asf_search/constants/__init__.py @@ -1,10 +1,11 @@ -"""Various constants to be used in search and related functions, provided as a convenience to help ensure sensible values.""" +"""Various constants to be used in search and related functions, +provided as a convenience to help ensure sensible values.""" -from .BEAMMODE import * -from .FLIGHT_DIRECTION import * -from .INSTRUMENT import * -from .PLATFORM import * -from .POLARIZATION import * -from .PRODUCT_TYPE import * -from .INTERNAL import * -from .DATASET import * \ No newline at end of file +from .BEAMMODE import * # noqa: F403 F401 +from .FLIGHT_DIRECTION import * # noqa: F403 F401 +from .INSTRUMENT import * # noqa: F403 F401 +from .PLATFORM import * # noqa: F403 F401 +from .POLARIZATION import * # noqa: F403 F401 +from .PRODUCT_TYPE import * # noqa: F403 F401 +from .INTERNAL import * # noqa: F403 F401 +from .DATASET import * # noqa: F403 F401 diff --git a/asf_search/download/__init__.py b/asf_search/download/__init__.py index f07cde41..c38bcd78 100644 --- a/asf_search/download/__init__.py +++ b/asf_search/download/__init__.py @@ -1,2 +1,2 @@ -from .download import download_urls, download_url, remotezip -from .file_download_type import FileDownloadType \ No newline at end of file +from .download import download_urls, download_url, remotezip # noqa: F401 +from .file_download_type import FileDownloadType # noqa: F401 diff --git a/asf_search/download/download.py b/asf_search/download/download.py index a07ffff2..6300fd5b 100644 --- a/asf_search/download/download.py +++ b/asf_search/download/download.py @@ -7,7 +7,7 @@ import warnings from asf_search.exceptions import ASFAuthenticationError, ASFDownloadError -from asf_search import ASF_LOGGER, ASFSession +from asf_search import ASFSession from tenacity import retry, stop_after_delay, retry_if_result, wait_fixed try: @@ -15,15 +15,15 @@ except ImportError: RemoteZip = None + def _download_url(arg): url, path, session = arg - download_url( - url=url, - path=path, - session=session) + download_url(url=url, path=path, session=session) -def download_urls(urls: Iterable[str], path: str, session: ASFSession = None, processes: int = 1): +def download_urls( + urls: Iterable[str], path: str, session: ASFSession = None, processes: int = 1 +): """ Downloads all products from the specified URLs to the specified location. 
@@ -47,7 +47,9 @@ def download_urls(urls: Iterable[str], path: str, session: ASFSession = None, pr
     pool.join()
 
 
-def download_url(url: str, path: str, filename: str = None, session: ASFSession = None) -> None:
+def download_url(
+    url: str, path: str, filename: str = None, session: ASFSession = None
+) -> None:
     """
     Downloads a product from the specified URL to the specified location and (optional) filename.
 
@@ -57,61 +59,79 @@ def download_url(url: str, path: str, filename: str = None, session: ASFSession
     :param session: The session to use, in most cases should be authenticated beforehand
     :return:
     """
-    
+
     if filename is None:
         filename = os.path.split(parse.urlparse(url).path)[1]
-    
+
     if not os.path.isdir(path):
-        raise ASFDownloadError(f'Error downloading {url}: directory not found: {path}')
+        raise ASFDownloadError(f"Error downloading {url}: directory not found: {path}")
 
     if os.path.isfile(os.path.join(path, filename)):
-        warnings.warn(f'File already exists, skipping download: {os.path.join(path, filename)}')
+        warnings.warn(
+            f"File already exists, skipping download: {os.path.join(path, filename)}"
+        )
         return
 
     if session is None:
         session = ASFSession()
 
     response = _try_get_response(session=session, url=url)
-    
-    with open(os.path.join(path, filename), 'wb') as f:
+
+    with open(os.path.join(path, filename), "wb") as f:
         for chunk in response.iter_content(chunk_size=8192):
             f.write(chunk)
 
-def remotezip(url: str, session: ASFSession) -> 'RemoteZip':
+
+def remotezip(url: str, session: ASFSession) -> "RemoteZip":  # type: ignore # noqa: F821
    """
    :param url: the url to the zip product
    :param session: the authenticated ASFSession to read and download from the zip file
    """
    if RemoteZip is None:
-        raise ImportError("Could not find remotezip package in current python environment. \"remotezip\" is an optional dependency of asf-search required for the `remotezip()` method. Enable by including the appropriate pip or conda install. Ex: `python3 -m pip install asf-search[extras]`")
-
-    session.hooks['response'].append(strip_auth_if_aws)
+        raise ImportError(
+            'Could not find remotezip package in current python environment. '
+            '"remotezip" is an optional dependency of asf-search required '
+            'for the `remotezip()` method. '
+            'Enable by including the appropriate pip or conda install. '
+            'Ex: `python3 -m pip install asf-search[extras]`'
+        )
+
+    session.hooks["response"].append(strip_auth_if_aws)
     return RemoteZip(url, session=session)
 
+
 def strip_auth_if_aws(r, *args, **kwargs):
-    if 300 <= r.status_code <= 399 and 'amazonaws.com' in parse.urlparse(r.headers['location']).netloc:
-        location = r.headers['location']
+    if (
+        300 <= r.status_code <= 399 and
+        "amazonaws.com" in parse.urlparse(r.headers["location"]).netloc
+    ):
+        location = r.headers["location"]
         r.headers.clear()
-        r.headers['location'] = location
+        r.headers["location"] = location
 
-# if it's an unprocessed burst product it'll return a 202 and we'll have to query again
+
+# if it's an unprocessed burst product it'll return a 202 and we'll have to query again
 # https://sentinel1-burst-docs.asf.alaska.edu/
 def _is_burst_processing(response: Response):
     return response.status_code == 202
 
-@retry(reraise=True,
-       retry=retry_if_result(_is_burst_processing),
-       wait=wait_fixed(1),
-       stop=stop_after_delay(90),
-       )
+
+@retry(
+    reraise=True,
+    retry=retry_if_result(_is_burst_processing),
+    wait=wait_fixed(1),
+    stop=stop_after_delay(90),
+)
 def _try_get_response(session: ASFSession, url: str):
-    response = session.get(url, stream=True, hooks={'response': strip_auth_if_aws})
+    response = session.get(url, stream=True, hooks={"response": strip_auth_if_aws})
     try:
         response.raise_for_status()
     except HTTPError as e:
         if 400 <= response.status_code <= 499:
-            raise ASFAuthenticationError(f'HTTP {e.response.status_code}: {e.response.text}')
+            raise ASFAuthenticationError(
+                f"HTTP {e.response.status_code}: {e.response.text}"
+            )
 
         raise e
diff --git a/asf_search/download/file_download_type.py b/asf_search/download/file_download_type.py
index d4b0184b..925de134 100644
--- a/asf_search/download/file_download_type.py
+++ b/asf_search/download/file_download_type.py
@@ -1,5 +1,6 @@
 from enum import Enum
 
+
 class FileDownloadType(Enum):
     DEFAULT_FILE = 1
     ADDITIONAL_FILES = 2
diff --git a/asf_search/exceptions.py b/asf_search/exceptions.py
index 8468af0e..abd74407 100644
--- a/asf_search/exceptions.py
+++ b/asf_search/exceptions.py
@@ -29,14 +29,18 @@ class ASFDownloadError(ASFError):
 class ASFAuthenticationError(ASFError):
     """Base download-related Exception"""
 
+
 class ASFWKTError(ASFError):
     """Raise when wkt related errors occur"""
 
+
 class CMRError(Exception):
     """Base CMR Exception"""
 
+
 class CMRConceptIDError(CMRError):
     """Raise when CMR encounters a concept-id error"""
 
+
 class CMRIncompleteError(CMRError):
     """Raise when CMR returns an incomplete page of results"""
diff --git a/asf_search/export/__init__.py b/asf_search/export/__init__.py
index c1673e3e..d878ef44 100644
--- a/asf_search/export/__init__.py
+++ b/asf_search/export/__init__.py
@@ -1,7 +1,7 @@
-from .export_translators import ASFSearchResults_to_properties_list
-from .csv import results_to_csv
-from .metalink import results_to_metalink
-from .kml import results_to_kml
-from .jsonlite import results_to_jsonlite
-from .jsonlite2 import results_to_jsonlite2
-from .geojson import results_to_geojson
+from .export_translators import ASFSearchResults_to_properties_list  # noqa: F401
+from .csv import results_to_csv  # noqa: F401
+from .metalink import results_to_metalink  # noqa: F401
+from .kml import results_to_kml  # noqa: F401
+from .jsonlite import results_to_jsonlite  # noqa: F401
+from .jsonlite2 import results_to_jsonlite2  # noqa: F401
+from .geojson import results_to_geojson  # noqa: F401
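Since the csv exporter below streams one formatted row at a time, a rough sketch of the intended call pattern (the granule name is hypothetical):

    import asf_search as asf
    from asf_search.export import results_to_csv

    # Any ASFSearchResults object can be passed; this search is illustrative.
    results = asf.granule_search(["EXAMPLE_GRANULE_NAME"])

    with open("results.csv", "w") as f:
        for row in results_to_csv(results):
            f.write(row)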
100644
--- a/asf_search/export/csv.py
+++ b/asf_search/export/csv.py
@@ -6,21 +6,33 @@
 import inspect
 
 extra_csv_fields = [
-    ('sceneDate', ['AdditionalAttributes', ('Name', 'ACQUISITION_DATE'), 'Values', 0]),
-    ('nearStartLat', ['AdditionalAttributes', ('Name', 'NEAR_START_LAT'), 'Values', 0]),
-    ('nearStartLon', ['AdditionalAttributes', ('Name', 'NEAR_START_LON'), 'Values', 0]),
-    ('farStartLat', ['AdditionalAttributes', ('Name', 'FAR_START_LAT'), 'Values', 0]),
-    ('farStartLon', ['AdditionalAttributes', ('Name', 'FAR_START_LON'), 'Values', 0]),
-    ('nearEndLat', ['AdditionalAttributes', ('Name', 'NEAR_END_LAT'), 'Values', 0]),
-    ('nearEndLon', ['AdditionalAttributes', ('Name', 'NEAR_END_LON'), 'Values', 0]),
-    ('farEndLat', ['AdditionalAttributes', ('Name', 'FAR_END_LAT'), 'Values', 0]),
-    ('farEndLon', ['AdditionalAttributes', ('Name', 'FAR_END_LON'), 'Values', 0]),
-    ('faradayRotation', ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0]),
-    ('configurationName', ['AdditionalAttributes', ('Name', 'BEAM_MODE_DESC'), 'Values', 0]),
-    ('doppler', ['AdditionalAttributes', ('Name', 'DOPPLER'), 'Values', 0]),
-    ('sizeMB', ['DataGranule', 'ArchiveAndDistributionInformation', 0, 'Size']),
-    ('insarStackSize', ['AdditionalAttributes', ('Name', 'INSAR_STACK_SIZE'), 'Values', 0]),
-    ('offNadirAngle', ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0])
+    ("sceneDate", ["AdditionalAttributes", ("Name", "ACQUISITION_DATE"), "Values", 0]),
+    ("nearStartLat", ["AdditionalAttributes", ("Name", "NEAR_START_LAT"), "Values", 0]),
+    ("nearStartLon", ["AdditionalAttributes", ("Name", "NEAR_START_LON"), "Values", 0]),
+    ("farStartLat", ["AdditionalAttributes", ("Name", "FAR_START_LAT"), "Values", 0]),
+    ("farStartLon", ["AdditionalAttributes", ("Name", "FAR_START_LON"), "Values", 0]),
+    ("nearEndLat", ["AdditionalAttributes", ("Name", "NEAR_END_LAT"), "Values", 0]),
+    ("nearEndLon", ["AdditionalAttributes", ("Name", "NEAR_END_LON"), "Values", 0]),
+    ("farEndLat", ["AdditionalAttributes", ("Name", "FAR_END_LAT"), "Values", 0]),
+    ("farEndLon", ["AdditionalAttributes", ("Name", "FAR_END_LON"), "Values", 0]),
+    (
+        "faradayRotation",
+        ["AdditionalAttributes", ("Name", "FARADAY_ROTATION"), "Values", 0],
+    ),
+    (
+        "configurationName",
+        ["AdditionalAttributes", ("Name", "BEAM_MODE_DESC"), "Values", 0],
+    ),
+    ("doppler", ["AdditionalAttributes", ("Name", "DOPPLER"), "Values", 0]),
+    ("sizeMB", ["DataGranule", "ArchiveAndDistributionInformation", 0, "Size"]),
+    (
+        "insarStackSize",
+        ["AdditionalAttributes", ("Name", "INSAR_STACK_SIZE"), "Values", 0],
+    ),
+    (
+        "offNadirAngle",
+        ["AdditionalAttributes", ("Name", "OFF_NADIR_ANGLE"), "Values", 0],
+    ),
 ]
 
 fieldnames = (
@@ -65,17 +77,19 @@
     "azimuthTime",
     "azimuthAnxTime",
     "samplesPerBurst",
-    "subswath"
+    "subswath",
 )
 
+
 def results_to_csv(results):
     ASF_LOGGER.info("started translating results to csv format")
-    
+
     if inspect.isgeneratorfunction(results) or isinstance(results, GeneratorType):
         return CSVStreamArray(results)
-    
+
     return CSVStreamArray([results])
 
+
 class CSVStreamArray(list):
     def __init__(self, results):
         self.pages = results
@@ -88,8 +102,6 @@ def __len__(self):
         return self.len
 
     def get_additional_output_fields(self, product):
-        umm = product.umm
-
         additional_fields = {}
         for key, path in extra_csv_fields:
             additional_fields[key] = product.umm_get(product.umm, *path)
@@ -97,75 +109,93 @@ def get_additional_output_fields(self, product):
         return additional_fields
 
     def streamRows(self):
-
         f = CSVBuffer()
-        writer = 
csv.DictWriter(f, quoting=csv.QUOTE_ALL, fieldnames=fieldnames) + writer = csv.DictWriter(f, quoting=csv.QUOTE_ALL, fieldnames=fieldnames) yield writer.writeheader() - + completed = False for page_idx, page in enumerate(self.pages): ASF_LOGGER.info(f"Streaming {len(page)} products from page {page_idx}") completed = page.searchComplete - - properties_list = ASFSearchResults_to_properties_list(page, self.get_additional_output_fields) + + properties_list = ASFSearchResults_to_properties_list( + page, self.get_additional_output_fields + ) yield from [writer.writerow(self.getItem(p)) for p in properties_list] if not completed: - ASF_LOGGER.warn('Failed to download all results from CMR') - - ASF_LOGGER.info('Finished streaming csv results') - + ASF_LOGGER.warn("Failed to download all results from CMR") + + ASF_LOGGER.info("Finished streaming csv results") + def getItem(self, p): return { - "Granule Name":p.get('sceneName'), - "Platform":p.get('platform'), - "Sensor":p.get('sensor'), - "Beam Mode":p.get('beamModeType'), - "Beam Mode Description":p.get('configurationName'), - "Orbit":p.get('orbit'), - "Path Number":p.get('pathNumber'), - "Frame Number":p.get('frameNumber'), - "Acquisition Date":p.get('sceneDate'), - "Processing Date":p.get('processingDate'), - "Processing Level":p.get('processingLevel'), - "Start Time":p.get('startTime'), - "End Time":p.get('stopTime'), - "Center Lat":p.get('centerLat'), - "Center Lon":p.get('centerLon'), - "Near Start Lat":p.get('nearStartLat'), - "Near Start Lon":p.get('nearStartLon'), - "Far Start Lat":p.get('farStartLat'), - "Far Start Lon":p.get('farStartLon'), - "Near End Lat":p.get('nearEndLat'), - "Near End Lon":p.get('nearEndLon'), - "Far End Lat":p.get('farEndLat'), - "Far End Lon":p.get('farEndLon'), - "Faraday Rotation":p.get('faradayRotation'), - "Ascending or Descending?":p.get('flightDirection'), - "URL":p.get('url'), - "Size (MB)":p.get('sizeMB'), - "Off Nadir Angle":p.get('offNadirAngle'), - "Stack Size":p.get('insarStackSize'), - "Doppler":p.get('doppler'), - "GroupID":p.get('groupID'), - "Pointing Angle":p.get('pointingAngle'), - "TemporalBaseline":p.get('teporalBaseline'), - "PerpendicularBaseline":p.get('pependicularBaseline'), - "relativeBurstID": p['burst']['relativeBurstID'] if p['processingLevel'] == 'BURST' else None, - "absoluteBurstID": p['burst']['absoluteBurstID'] if p['processingLevel'] == 'BURST' else None, - "fullBurstID": p['burst']['fullBurstID'] if p['processingLevel'] == 'BURST' else None, - "burstIndex": p['burst']['burstIndex'] if p['processingLevel'] == 'BURST' else None, - "azimuthTime": p['burst']['azimuthTime'] if p['processingLevel'] == 'BURST' else None, - "azimuthAnxTime": p['burst']['azimuthAnxTime'] if p['processingLevel'] == 'BURST' else None, - "samplesPerBurst": p['burst']['samplesPerBurst'] if p['processingLevel'] == 'BURST' else None, - "subswath": p['burst']['subswath'] if p['processingLevel'] == 'BURST' else None + "Granule Name": p.get("sceneName"), + "Platform": p.get("platform"), + "Sensor": p.get("sensor"), + "Beam Mode": p.get("beamModeType"), + "Beam Mode Description": p.get("configurationName"), + "Orbit": p.get("orbit"), + "Path Number": p.get("pathNumber"), + "Frame Number": p.get("frameNumber"), + "Acquisition Date": p.get("sceneDate"), + "Processing Date": p.get("processingDate"), + "Processing Level": p.get("processingLevel"), + "Start Time": p.get("startTime"), + "End Time": p.get("stopTime"), + "Center Lat": p.get("centerLat"), + "Center Lon": p.get("centerLon"), + "Near Start Lat": 
p.get("nearStartLat"), + "Near Start Lon": p.get("nearStartLon"), + "Far Start Lat": p.get("farStartLat"), + "Far Start Lon": p.get("farStartLon"), + "Near End Lat": p.get("nearEndLat"), + "Near End Lon": p.get("nearEndLon"), + "Far End Lat": p.get("farEndLat"), + "Far End Lon": p.get("farEndLon"), + "Faraday Rotation": p.get("faradayRotation"), + "Ascending or Descending?": p.get("flightDirection"), + "URL": p.get("url"), + "Size (MB)": p.get("sizeMB"), + "Off Nadir Angle": p.get("offNadirAngle"), + "Stack Size": p.get("insarStackSize"), + "Doppler": p.get("doppler"), + "GroupID": p.get("groupID"), + "Pointing Angle": p.get("pointingAngle"), + "TemporalBaseline": p.get("teporalBaseline"), + "PerpendicularBaseline": p.get("pependicularBaseline"), + "relativeBurstID": p["burst"]["relativeBurstID"] + if p["processingLevel"] == "BURST" + else None, + "absoluteBurstID": p["burst"]["absoluteBurstID"] + if p["processingLevel"] == "BURST" + else None, + "fullBurstID": p["burst"]["fullBurstID"] + if p["processingLevel"] == "BURST" + else None, + "burstIndex": p["burst"]["burstIndex"] + if p["processingLevel"] == "BURST" + else None, + "azimuthTime": p["burst"]["azimuthTime"] + if p["processingLevel"] == "BURST" + else None, + "azimuthAnxTime": p["burst"]["azimuthAnxTime"] + if p["processingLevel"] == "BURST" + else None, + "samplesPerBurst": p["burst"]["samplesPerBurst"] + if p["processingLevel"] == "BURST" + else None, + "subswath": p["burst"]["subswath"] + if p["processingLevel"] == "BURST" + else None, } + class CSVBuffer: -# https://docs.djangoproject.com/en/3.2/howto/outputting-csv/#streaming-large-csv-files -# A dummy CSV buffer to be used by the csv.writer class, returns the -# formatted csv row "written" to it when writer.writerow/writeheader is called - + # https://docs.djangoproject.com/en/3.2/howto/outputting-csv/#streaming-large-csv-files + # A dummy CSV buffer to be used by the csv.writer class, returns the + # formatted csv row "written" to it when writer.writerow/writeheader is called + def write(self, value): """Write the value by returning it, instead of storing in a buffer.""" return value diff --git a/asf_search/export/export_translators.py b/asf_search/export/export_translators.py index f34f5706..832bbd50 100644 --- a/asf_search/export/export_translators.py +++ b/asf_search/export/export_translators.py @@ -3,28 +3,35 @@ from asf_search import ASFSearchResults -# ASFProduct.properties don't have every property required of certain output formats, + +# ASFProduct.properties don't have every property required of certain output formats, # This grabs the missing properties from ASFProduct.umm required by the given format -def ASFSearchResults_to_properties_list(results: ASFSearchResults, get_additional_fields: FunctionType): +def ASFSearchResults_to_properties_list( + results: ASFSearchResults, get_additional_fields: FunctionType +): property_list = [] - + for product in results: additional_fields = get_additional_fields(product) properties = {**product.properties, **additional_fields} property_list.append(properties) - + # Format dates to match format used by SearchAPI output formats for product in property_list: # S1 date properties are formatted differently from other platforms - is_S1 = product['platform'].upper() in ['SENTINEL-1', 'SENTINEL-1B', 'SENTINEL-1A'] + is_S1 = product["platform"].upper() in [ + "SENTINEL-1", + "SENTINEL-1B", + "SENTINEL-1A", + ] for key, data in product.items(): - if ('date' in key.lower() or 'time' in key.lower()) and data is not None: + if ("date" in 
key.lower() or "time" in key.lower()) and data is not None: if not is_S1: # Remove trailing zeroes from miliseconds, add Z - if len(data.split('.')) == 2: - d = len(data.split('.')[0]) - data = data[:d] + 'Z' - time = datetime.strptime(data, '%Y-%m-%dT%H:%M:%SZ') - product[key] = time.strftime('%Y-%m-%dT%H:%M:%SZ') + if len(data.split(".")) == 2: + d = len(data.split(".")[0]) + data = data[:d] + "Z" + time = datetime.strptime(data, "%Y-%m-%dT%H:%M:%SZ") + product[key] = time.strftime("%Y-%m-%dT%H:%M:%SZ") return property_list diff --git a/asf_search/export/geojson.py b/asf_search/export/geojson.py index 2cb51d87..88434294 100644 --- a/asf_search/export/geojson.py +++ b/asf_search/export/geojson.py @@ -4,17 +4,23 @@ from asf_search import ASF_LOGGER + def results_to_geojson(results): - ASF_LOGGER.info('started translating results to geojson format') + ASF_LOGGER.info("started translating results to geojson format") - if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): + if not inspect.isgeneratorfunction(results) and not isinstance( + results, GeneratorType + ): results = [results] - + streamer = GeoJSONStreamArray(results) - for p in json.JSONEncoder(indent=2, sort_keys=True).iterencode({'type': 'FeatureCollection','features':streamer}): + for p in json.JSONEncoder(indent=2, sort_keys=True).iterencode( + {"type": "FeatureCollection", "features": streamer} + ): yield p + class GeoJSONStreamArray(list): def __init__(self, results): self.results = results @@ -28,19 +34,19 @@ def __iter__(self): def __len__(self): return self.len - + def streamDicts(self): completed = False for page_idx, page in enumerate(self.results): ASF_LOGGER.info(f"Streaming {len(page)} products from page {page_idx}") completed = page.searchComplete - + yield from [self.getItem(p) for p in page if p is not None] if not completed: - ASF_LOGGER.warn('Failed to download all results from CMR') + ASF_LOGGER.warn("Failed to download all results from CMR") + + ASF_LOGGER.info("Finished streaming geojson results") - ASF_LOGGER.info('Finished streaming geojson results') - def getItem(self, p): return p.geojson() diff --git a/asf_search/export/jsonlite.py b/asf_search/export/jsonlite.py index 8f581cfd..f6a5454f 100644 --- a/asf_search/export/jsonlite.py +++ b/asf_search/export/jsonlite.py @@ -9,42 +9,54 @@ from asf_search.export.export_translators import ASFSearchResults_to_properties_list extra_jsonlite_fields = [ - ('processingTypeDisplay', ['AdditionalAttributes', ('Name', 'PROCESSING_TYPE_DISPLAY'), 'Values', 0]), - ('thumb', ['AdditionalAttributes', ('Name', 'THUMBNAIL_URL'), 'Values', 0]), - ('faradayRotation', ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0]), - ('sizeMB', ['DataGranule', 'ArchiveAndDistributionInformation', 0, 'Size']), - ('flightLine', ['AdditionalAttributes', ('Name', 'FLIGHT_LINE'), 'Values', 0]), - ('missionName', ['AdditionalAttributes', ('Name', 'MISSION_NAME'), 'Values', 0]), + ( + "processingTypeDisplay", + ["AdditionalAttributes", ("Name", "PROCESSING_TYPE_DISPLAY"), "Values", 0], + ), + ("thumb", ["AdditionalAttributes", ("Name", "THUMBNAIL_URL"), "Values", 0]), + ( + "faradayRotation", + ["AdditionalAttributes", ("Name", "FARADAY_ROTATION"), "Values", 0], + ), + ("sizeMB", ["DataGranule", "ArchiveAndDistributionInformation", 0, "Size"]), + ("flightLine", ["AdditionalAttributes", ("Name", "FLIGHT_LINE"), "Values", 0]), + ("missionName", ["AdditionalAttributes", ("Name", "MISSION_NAME"), "Values", 0]), ] + def results_to_jsonlite(results): - 
ASF_LOGGER.info('started translating results to jsonlite format') + ASF_LOGGER.info("started translating results to jsonlite format") - if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): + if not inspect.isgeneratorfunction(results) and not isinstance( + results, GeneratorType + ): results = [results] - + streamer = JSONLiteStreamArray(results) - jsondata = {'results': streamer} + jsondata = {"results": streamer} for p in json.JSONEncoder(indent=2, sort_keys=True).iterencode(jsondata): yield p + def unwrap_shape(x, y, z=None): - x = x if x > 0 else x + 360 - return tuple([x, y]) + x = x if x > 0 else x + 360 + return tuple([x, y]) + def get_wkts(geometry) -> Tuple[str, str]: wrapped = shape(geometry) - + min_lon, max_lon = (wrapped.bounds[0], wrapped.bounds[2]) - - if max_lon - min_lon > 180: + + if max_lon - min_lon > 180: unwrapped = transform(unwrap_shape, wrapped) else: unwrapped = wrapped return wrapped.wkt, unwrapped.wkt + class JSONLiteStreamArray(list): def __init__(self, results): self.results = results @@ -61,130 +73,155 @@ def __len__(self): def get_additional_output_fields(self, product): # umm = product.umm - + additional_fields = {} for key, path in extra_jsonlite_fields: additional_fields[key] = product.umm_get(product.umm, *path) - if product.properties['platform'].upper() in ['ALOS', 'RADARSAT-1', 'JERS-1', 'ERS-1', 'ERS-2']: - insarGrouping = product.umm_get(product.umm, *['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]) - - if insarGrouping not in [None, 0, '0', 'NA', 'NULL']: - additional_fields['canInsar'] = True - additional_fields['insarStackSize'] = product.umm_get(product.umm, *['AdditionalAttributes', ('Name', 'INSAR_STACK_SIZE'), 'Values', 0]) + if product.properties["platform"].upper() in [ + "ALOS", + "RADARSAT-1", + "JERS-1", + "ERS-1", + "ERS-2", + ]: + insarGrouping = product.umm_get( + product.umm, + *["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0], + ) + + if insarGrouping not in [None, 0, "0", "NA", "NULL"]: + additional_fields["canInsar"] = True + additional_fields["insarStackSize"] = product.umm_get( + product.umm, + *[ + "AdditionalAttributes", + ("Name", "INSAR_STACK_SIZE"), + "Values", + 0, + ], + ) else: - additional_fields['canInsar'] = False + additional_fields["canInsar"] = False else: - additional_fields['canInsar'] = product.baseline is not None + additional_fields["canInsar"] = product.baseline is not None + + additional_fields["geometry"] = product.geometry - additional_fields['geometry'] = product.geometry - return additional_fields def streamDicts(self): - completed = False for page_idx, page in enumerate(self.results): ASF_LOGGER.info(f"Streaming {len(page)} products from page {page_idx}") completed = page.searchComplete - - yield from [self.getItem(p) for p in ASFSearchResults_to_properties_list(page, self.get_additional_output_fields) if p is not None] + + yield from [ + self.getItem(p) + for p in ASFSearchResults_to_properties_list( + page, self.get_additional_output_fields + ) + if p is not None + ] if not completed: - ASF_LOGGER.warn('Failed to download all results from CMR') + ASF_LOGGER.warn("Failed to download all results from CMR") ASF_LOGGER.info(f"Finished streaming {self.getOutputType()} results") - + def getItem(self, p): for i in p.keys(): - if p[i] == 'NA' or p[i] == '': + if p[i] == "NA" or p[i] == "": p[i] = None try: - if p.get('offNadirAngle') is not None and float(p['offNadirAngle']) < 0: - p['offNadirAngle'] = None + if p.get("offNadirAngle") is 
not None and float(p["offNadirAngle"]) < 0: + p["offNadirAngle"] = None except TypeError: pass try: - if p.get('patNumber'): - if float(p['pathNumber']) < 0: - p['pathNumber'] = None + if p.get("patNumber"): + if float(p["pathNumber"]) < 0: + p["pathNumber"] = None except TypeError: pass try: - if p.get('groupID') is None: - p['groupID'] = p['sceneName'] + if p.get("groupID") is None: + p["groupID"] = p["sceneName"] except TypeError: pass try: - p['sizeMB'] = float(p['sizeMB']) + p["sizeMB"] = float(p["sizeMB"]) except TypeError: pass try: - p['pathNumber'] = int(p['pathNumber']) + p["pathNumber"] = int(p["pathNumber"]) except TypeError: pass try: - p['frameNumber'] = int(p['frameNumber']) + p["frameNumber"] = int(p["frameNumber"]) except TypeError: pass try: - p['orbit'] = int(p['orbit']) + p["orbit"] = int(p["orbit"]) except TypeError: pass - wrapped, unwrapped = get_wkts(p['geometry']) + wrapped, unwrapped = get_wkts(p["geometry"]) result = { - 'beamMode': p['beamModeType'], - 'browse': [] if p.get('browse') is None else p.get('browse'), - 'canInSAR': p.get('canInsar'), - 'dataset': p.get('platform'), - 'downloadUrl': p.get('url'), - 'faradayRotation': p.get('faradayRotation'), # ALOS - 'fileName': p.get('fileName'), - 'flightDirection': p.get('flightDirection'), - 'flightLine': p.get('flightLine'), - 'frame': p.get('frameNumber'), - 'granuleName': p.get('sceneName'), - 'groupID': p.get('groupID'), - 'instrument': p.get('sensor'), - 'missionName': p.get('missionName'), - 'offNadirAngle': str(p['offNadirAngle']) if p.get('offNadirAngle') is not None else None, # ALOS - 'orbit': [str(p['orbit'])], - 'path': p.get('pathNumber'), - 'polarization': p.get('polarization'), - 'pointingAngle': p.get('pointingAngle'), - 'productID': p.get('fileID'), - 'productType': p.get('processingLevel'), - 'productTypeDisplay': p.get('processingTypeDisplay'), - 'sizeMB': p.get('sizeMB'), - 'stackSize': p.get('insarStackSize'), # Used for datasets with precalculated stacks - 'startTime': p.get('startTime'), - 'stopTime': p.get('stopTime'), - 'thumb': p.get('thumb'), - 'wkt': wrapped, - 'wkt_unwrapped': unwrapped, - 'pgeVersion': p.get('pgeVersion') + "beamMode": p["beamModeType"], + "browse": [] if p.get("browse") is None else p.get("browse"), + "canInSAR": p.get("canInsar"), + "dataset": p.get("platform"), + "downloadUrl": p.get("url"), + "faradayRotation": p.get("faradayRotation"), # ALOS + "fileName": p.get("fileName"), + "flightDirection": p.get("flightDirection"), + "flightLine": p.get("flightLine"), + "frame": p.get("frameNumber"), + "granuleName": p.get("sceneName"), + "groupID": p.get("groupID"), + "instrument": p.get("sensor"), + "missionName": p.get("missionName"), + "offNadirAngle": str(p["offNadirAngle"]) + if p.get("offNadirAngle") is not None + else None, # ALOS + "orbit": [str(p["orbit"])], + "path": p.get("pathNumber"), + "polarization": p.get("polarization"), + "pointingAngle": p.get("pointingAngle"), + "productID": p.get("fileID"), + "productType": p.get("processingLevel"), + "productTypeDisplay": p.get("processingTypeDisplay"), + "sizeMB": p.get("sizeMB"), + "stackSize": p.get( + "insarStackSize" + ), # Used for datasets with precalculated stacks + "startTime": p.get("startTime"), + "stopTime": p.get("stopTime"), + "thumb": p.get("thumb"), + "wkt": wrapped, + "wkt_unwrapped": unwrapped, + "pgeVersion": p.get("pgeVersion"), } - + for key in result.keys(): - if result[key] in [ 'NA', 'NULL']: + if result[key] in ["NA", "NULL"]: result[key] = None - if 'temporalBaseline' in p.keys() or 
'perpendicularBaseline' in p.keys(): - result['temporalBaseline'] = p['temporalBaseline'] - result['perpendicularBaseline'] = p['perpendicularBaseline'] + if "temporalBaseline" in p.keys() or "perpendicularBaseline" in p.keys(): + result["temporalBaseline"] = p["temporalBaseline"] + result["perpendicularBaseline"] = p["perpendicularBaseline"] - if p.get('processingLevel') == 'BURST': # is a burst product - result['burst'] = p['burst'] + if p.get("processingLevel") == "BURST": # is a burst product + result["burst"] = p["burst"] return result def getOutputType(self) -> str: - return 'jsonlite' - \ No newline at end of file + return "jsonlite" diff --git a/asf_search/export/jsonlite2.py b/asf_search/export/jsonlite2.py index 5cd936b2..784f5c7f 100644 --- a/asf_search/export/jsonlite2.py +++ b/asf_search/export/jsonlite2.py @@ -5,63 +5,73 @@ from asf_search import ASF_LOGGER from .jsonlite import JSONLiteStreamArray + def results_to_jsonlite2(results): - ASF_LOGGER.info('started translating results to jsonlite2 format') + ASF_LOGGER.info("started translating results to jsonlite2 format") - if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): + if not inspect.isgeneratorfunction(results) and not isinstance( + results, GeneratorType + ): results = [results] - + streamer = JSONLite2StreamArray(results) - for p in json.JSONEncoder(sort_keys=True, separators=(',', ':')).iterencode({'results': streamer}): + for p in json.JSONEncoder(sort_keys=True, separators=(",", ":")).iterencode( + {"results": streamer} + ): yield p + class JSONLite2StreamArray(JSONLiteStreamArray): def getItem(self, p): - # pre-processing of the result is the same as in the base jsonlite streamer, - # so use that and then rename/substitute fields + # pre-processing of the result is the same as in the base jsonlite streamer, + # so use that and then rename/substitute fields p = super().getItem(p) result = { - 'b': [a.replace(p['granuleName'], '{gn}') for a in p['browse']] if p['browse'] is not None else p['browse'], - 'bm': p['beamMode'], - 'd': p['dataset'], - 'du': p['downloadUrl'].replace(p['granuleName'], '{gn}'), - 'f': p['frame'], - 'fd': p['flightDirection'], - 'fl': p['flightLine'], - 'fn': p['fileName'].replace(p['granuleName'], '{gn}'), - 'fr': p['faradayRotation'], # ALOS - 'gid': p['groupID'].replace(p['granuleName'], '{gn}'), - 'gn': p['granuleName'], - 'i': p['instrument'], - 'in': p['canInSAR'], - 'mn': p['missionName'], - 'o': p['orbit'], - 'on': p['offNadirAngle'], # ALOS - 'p': p['path'], - 'pid': p['productID'].replace(p['granuleName'], '{gn}'), - 'pa': p['pointingAngle'], - 'po': p['polarization'], - 'pt': p['productType'], - 'ptd': p['productTypeDisplay'], - 's': p['sizeMB'], - 'ss': p['stackSize'], # Used for datasets with precalculated stacks - 'st': p['startTime'], - 'stp': p['stopTime'], - 't': p['thumb'].replace(p['granuleName'], '{gn}') if p['thumb'] is not None else p['thumb'], - 'w': p['wkt'], - 'wu': p['wkt_unwrapped'], - 'pge': p['pgeVersion'] + "b": [a.replace(p["granuleName"], "{gn}") for a in p["browse"]] + if p["browse"] is not None + else p["browse"], + "bm": p["beamMode"], + "d": p["dataset"], + "du": p["downloadUrl"].replace(p["granuleName"], "{gn}"), + "f": p["frame"], + "fd": p["flightDirection"], + "fl": p["flightLine"], + "fn": p["fileName"].replace(p["granuleName"], "{gn}"), + "fr": p["faradayRotation"], # ALOS + "gid": p["groupID"].replace(p["granuleName"], "{gn}"), + "gn": p["granuleName"], + "i": p["instrument"], + "in": p["canInSAR"], + "mn": 
p["missionName"], + "o": p["orbit"], + "on": p["offNadirAngle"], # ALOS + "p": p["path"], + "pid": p["productID"].replace(p["granuleName"], "{gn}"), + "pa": p["pointingAngle"], + "po": p["polarization"], + "pt": p["productType"], + "ptd": p["productTypeDisplay"], + "s": p["sizeMB"], + "ss": p["stackSize"], # Used for datasets with precalculated stacks + "st": p["startTime"], + "stp": p["stopTime"], + "t": p["thumb"].replace(p["granuleName"], "{gn}") + if p["thumb"] is not None + else p["thumb"], + "w": p["wkt"], + "wu": p["wkt_unwrapped"], + "pge": p["pgeVersion"], } - if 'temporalBaseline' in p.keys() or 'perpendicularBaseline' in p.keys(): - result['tb'] = p['temporalBaseline'] - result['pb'] = p['perpendicularBaseline'] + if "temporalBaseline" in p.keys() or "perpendicularBaseline" in p.keys(): + result["tb"] = p["temporalBaseline"] + result["pb"] = p["perpendicularBaseline"] + + if p.get("burst") is not None: # is a burst product + result["s1b"] = p["burst"] - if p.get('burst') is not None: # is a burst product - result['s1b'] = p['burst'] - return result def getOutputType(self) -> str: - return 'jsonlite2' + return "jsonlite2" diff --git a/asf_search/export/kml.py b/asf_search/export/kml.py index 1486a1f8..7263710d 100644 --- a/asf_search/export/kml.py +++ b/asf_search/export/kml.py @@ -6,24 +6,52 @@ import xml.etree.ElementTree as ETree extra_kml_fields = [ - ('configurationName', ['AdditionalAttributes', ('Name', 'BEAM_MODE_DESC'), 'Values', 0]), - ('faradayRotation', ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0]), - ('processingTypeDisplay', ['AdditionalAttributes', ('Name', 'PROCESSING_TYPE_DISPLAY'), 'Values', 0]), - ('sceneDate', ['AdditionalAttributes', ('Name', 'ACQUISITION_DATE'), 'Values', 0]), - ('shape', ['SpatialExtent', 'HorizontalSpatialDomain', 'Geometry', 'GPolygons', 0, 'Boundary', 'Points']), - ('thumbnailUrl', ['AdditionalAttributes', ('Name', 'THUMBNAIL_URL'), 'Values', 0]), - ('faradayRotation', ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0]), - ('offNadirAngle', ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0]) + ( + "configurationName", + ["AdditionalAttributes", ("Name", "BEAM_MODE_DESC"), "Values", 0], + ), + ( + "faradayRotation", + ["AdditionalAttributes", ("Name", "FARADAY_ROTATION"), "Values", 0], + ), + ( + "processingTypeDisplay", + ["AdditionalAttributes", ("Name", "PROCESSING_TYPE_DISPLAY"), "Values", 0], + ), + ("sceneDate", ["AdditionalAttributes", ("Name", "ACQUISITION_DATE"), "Values", 0]), + ( + "shape", + [ + "SpatialExtent", + "HorizontalSpatialDomain", + "Geometry", + "GPolygons", + 0, + "Boundary", + "Points", + ], + ), + ("thumbnailUrl", ["AdditionalAttributes", ("Name", "THUMBNAIL_URL"), "Values", 0]), + ( + "faradayRotation", + ["AdditionalAttributes", ("Name", "FARADAY_ROTATION"), "Values", 0], + ), + ( + "offNadirAngle", + ["AdditionalAttributes", ("Name", "OFF_NADIR_ANGLE"), "Values", 0], + ), ] + def results_to_kml(results): - ASF_LOGGER.info('Started translating results to kml format') - + ASF_LOGGER.info("Started translating results to kml format") + if inspect.isgeneratorfunction(results) or isinstance(results, GeneratorType): return KMLStreamArray(results) - + return KMLStreamArray([results]) + class KMLStreamArray(MetalinkStreamArray): def __init__(self, results): MetalinkStreamArray.__init__(self, results) @@ -42,134 +70,154 @@ def __init__(self, results): \n """ self.footer = """\n""" - def getOutputType(self) -> str: - return 'kml' - + return "kml" + def 
get_additional_fields(self, product): umm = product.umm additional_fields = {} for key, path in extra_kml_fields: additional_fields[key] = product.umm_get(umm, *path) return additional_fields - + def getItem(self, p): placemark = ETree.Element("Placemark") - name = ETree.Element('name') - name.text = p['sceneName'] + name = ETree.Element("name") + name.text = p["sceneName"] placemark.append(name) - - description = ETree.Element('description') + + description = ETree.Element("description") description.text = """<![CDATA[""" placemark.append(description) - - h1 = ETree.Element('h1') - h1.text = f"{p['platform']} ({p['configurationName']}), acquired {p['sceneDate']}" - h2 = ETree.Element('h2') - h2.text = p.get('url', '') + + h1 = ETree.Element("h1") + h1.text = ( + f"{p['platform']} ({p['configurationName']}), acquired {p['sceneDate']}" + ) + h2 = ETree.Element("h2") + h2.text = p.get("url", "") description.append(h1) description.append(h2) - - div = ETree.Element('div', attrib={'style': 'position:absolute;left:20px;top:200px'}) + + div = ETree.Element( + "div", attrib={"style": "position:absolute;left:20px;top:200px"} + ) description.append(div) - h3 = ETree.Element('h3') - h3.text = 'Metadata' + h3 = ETree.Element("h3") + h3.text = "Metadata" div.append(h3) - - ul = ETree.Element('ul') + + ul = ETree.Element("ul") div.append(ul) - + for text, value in self.metadata_fields(p).items(): - li = ETree.Element('li') + li = ETree.Element("li") li.text = text + str(value) ul.append(li) - - d = ETree.Element('div', attrib={'style': "position:absolute;left:300px;top:250px"}) + + d = ETree.Element( + "div", attrib={"style": "position:absolute;left:300px;top:250px"} + ) description.append(d) - - a = ETree.Element('a') - if p.get('browse') is not None: - a.set('href', p.get('browse')[0]) + + a = ETree.Element("a") + if p.get("browse") is not None: + a.set("href", p.get("browse")[0]) else: - a.set('href', "") - + a.set("href", "") + d.append(a) - - img = ETree.Element('img') - if p.get('thumbnailUrl') is not None: - img.set('src', p.get('thumbnailUrl')) + + img = ETree.Element("img") + if p.get("thumbnailUrl") is not None: + img.set("src", p.get("thumbnailUrl")) else: - img.set('src', "None") + img.set("src", "None") a.append(img) - - styleUrl = ETree.Element('styleUrl') - styleUrl.text = '#yellowLineGreenPoly' + + styleUrl = ETree.Element("styleUrl") + styleUrl.text = "#yellowLineGreenPoly" placemark.append(styleUrl) - - polygon = ETree.Element('Polygon') + + polygon = ETree.Element("Polygon") placemark.append(polygon) - extrude = ETree.Element('extrude') - extrude.text = '1' + extrude = ETree.Element("extrude") + extrude.text = "1" polygon.append(extrude) - - altitudeMode = ETree.Element('altitudeMode') - altitudeMode.text = 'relativeToGround' + + altitudeMode = ETree.Element("altitudeMode") + altitudeMode.text = "relativeToGround" polygon.append(altitudeMode) - - outerBondaryIs = ETree.Element('outerBoundaryIs') + + outerBondaryIs = ETree.Element("outerBoundaryIs") polygon.append(outerBondaryIs) - + linearRing = ETree.Element("LinearRing") outerBondaryIs.append(linearRing) - - coordinates = ETree.Element('coordinates') - - if p.get('shape') is not None: - coordinates.text = '\n' + (14 * ' ') + ('\n' + (14 * ' ')).join([f"{c['Longitude']},{c['Latitude']},2000" for c in p.get('shape')]) + '\n' + (14 * ' ') + + coordinates = ETree.Element("coordinates") + + if p.get("shape") is not None: + coordinates.text = ( + "\n" + + (14 * " ") + + ("\n" + (14 * " ")).join( + 
[f"{c['Longitude']},{c['Latitude']},2000" for c in p.get("shape")] + ) + + "\n" + + (14 * " ") + ) linearRing.append(coordinates) self.indent(placemark, 3) - + # for CDATA section, manually replace & escape character with & - return ETree.tostring(placemark, encoding='unicode').replace('&', '&') - + return ETree.tostring(placemark, encoding="unicode").replace("&", "&") + # Helper method for getting additional fields in
      tag def metadata_fields(self, item: Dict): required = { - 'Processing type: ': item['processingTypeDisplay'], - 'Frame: ': item['frameNumber'], - 'Path: ': item['pathNumber'], - 'Orbit: ': item['orbit'], - 'Start time: ': item['startTime'], - 'End time: ': item['stopTime'], + "Processing type: ": item["processingTypeDisplay"], + "Frame: ": item["frameNumber"], + "Path: ": item["pathNumber"], + "Orbit: ": item["orbit"], + "Start time: ": item["startTime"], + "End time: ": item["stopTime"], } - + optional = {} - for text, key in [('Faraday Rotation: ', 'faradayRotation'), ('Ascending/Descending: ', 'flightDirection'), ('Off Nadir Angle: ', 'offNadirAngle'), ('Pointing Angle: ', 'pointingAngle'), ('Temporal Baseline: ', 'temporalBaseline'), ('Perpendicular Baseline: ', 'perpendicularBaseline')]: + for text, key in [ + ("Faraday Rotation: ", "faradayRotation"), + ("Ascending/Descending: ", "flightDirection"), + ("Off Nadir Angle: ", "offNadirAngle"), + ("Pointing Angle: ", "pointingAngle"), + ("Temporal Baseline: ", "temporalBaseline"), + ("Perpendicular Baseline: ", "perpendicularBaseline"), + ]: if item.get(key) is not None: - if type(item[key]) == float and key == 'offNadirAngle': - optional[text] = f'{item[key]:g}' #trim trailing zeros + if isinstance(item[key], float) and key == "offNadirAngle": + optional[text] = f"{item[key]:g}" # trim trailing zeros else: optional[text] = item[key] - elif key not in ['temporalBaseline', 'perpendicularBaseline']: - optional[text] = 'None' - - output = { **required, **optional } - if item['processingLevel'] == 'BURST': + elif key not in ["temporalBaseline", "perpendicularBaseline"]: + optional[text] = "None" + + output = {**required, **optional} + if item["processingLevel"] == "BURST": burst = { - 'Absolute Burst ID: ' : item['burst']['absoluteBurstID'], - 'Relative Burst ID: ' : item['burst']['relativeBurstID'], - 'Full Burst ID: ': item['burst']['fullBurstID'], - 'Burst Index: ': item['burst']['burstIndex'], - 'Azimuth Time: ': item['burst']['azimuthTime'], - 'Azimuth Anx Time: ': item['burst']['azimuthAnxTime'], - 'Samples per Burst: ': item['burst']['samplesPerBurst'], - 'Subswath: ': item['burst']['subswath'] + "Absolute Burst ID: ": item["burst"]["absoluteBurstID"], + "Relative Burst ID: ": item["burst"]["relativeBurstID"], + "Full Burst ID: ": item["burst"]["fullBurstID"], + "Burst Index: ": item["burst"]["burstIndex"], + "Azimuth Time: ": item["burst"]["azimuthTime"], + "Azimuth Anx Time: ": item["burst"]["azimuthAnxTime"], + "Samples per Burst: ": item["burst"]["samplesPerBurst"], + "Subswath: ": item["burst"]["subswath"], } - - output = { **output, **burst} - - return output \ No newline at end of file + + output = {**output, **burst} + + return output diff --git a/asf_search/export/metalink.py b/asf_search/export/metalink.py index 15c1736d..7171f624 100644 --- a/asf_search/export/metalink.py +++ b/asf_search/export/metalink.py @@ -4,21 +4,23 @@ from asf_search import ASF_LOGGER from asf_search.export.export_translators import ASFSearchResults_to_properties_list + def results_to_metalink(results): - ASF_LOGGER.info('Started translating results to metalink format') - + ASF_LOGGER.info("Started translating results to metalink format") + if inspect.isgeneratorfunction(results) or isinstance(results, GeneratorType): return MetalinkStreamArray(results) - + return MetalinkStreamArray([results]) + class MetalinkStreamArray(list): def __init__(self, results): self.pages = results self.len = 1 self.header = """ - Alaska Satellite 
Facilityhttp://www.asf.alaska.edu/
+    Alaska Satellite Facilityhttp://www.asf.alaska.edu/
 """
         self.footer = """
@@ -26,7 +28,7 @@ def __init__(self, results):
 
     def get_additional_fields(self, product):
         return {}
-    
+
     def __iter__(self):
         return self.streamPages()
 
@@ -35,62 +37,63 @@ def __len__(self):
 
     def streamPages(self):
         yield self.header
-        
+
         completed = False
         for page_idx, page in enumerate(self.pages):
             ASF_LOGGER.info(f"Streaming {len(page)} products from page {page_idx}")
             completed = page.searchComplete
-            
-            properties_list = ASFSearchResults_to_properties_list(page, self.get_additional_fields)
+
+            properties_list = ASFSearchResults_to_properties_list(
+                page, self.get_additional_fields
+            )
             yield from [self.getItem(p) for p in properties_list]
-        
+
         if not completed:
-            ASF_LOGGER.warn('Failed to download all results from CMR')
-        
+            ASF_LOGGER.warn("Failed to download all results from CMR")
+
         yield self.footer
-        
+
         ASF_LOGGER.info(f"Finished streaming {self.getOutputType()} results")
 
-    def getOutputType(self) -> str:
-        return 'metalink'
-    
+    def getOutputType(self) -> str:
+        return "metalink"
+
     def getItem(self, p):
-        file = ETree.Element("file", attrib={'name': p['fileName']})
-        resources = ETree.Element('resources')
+        file = ETree.Element("file", attrib={"name": p["fileName"]})
+        resources = ETree.Element("resources")
 
-        url = ETree.Element('url', attrib={'type': 'http'})
-        url.text = p['url']
+        url = ETree.Element("url", attrib={"type": "http"})
+        url.text = p["url"]
         resources.append(url)
         file.append(resources)
-        
-        if p.get('md5sum') and p.get('md5sum') != 'NA':
-            verification = ETree.Element('verification')
-            h = ETree.Element('hash', {'type': 'md5'})
-            h.text = p['md5sum']
+
+        if p.get("md5sum") and p.get("md5sum") != "NA":
+            verification = ETree.Element("verification")
+            h = ETree.Element("hash", {"type": "md5"})
+            h.text = p["md5sum"]
             verification.append(h)
             file.append(verification)
-        
-        if p['bytes'] and p['bytes'] != 'NA':
-            size = ETree.Element('size')
-            size.text = str(p['bytes'])
+
+        if p["bytes"] and p["bytes"] != "NA":
+            size = ETree.Element("size")
+            size.text = str(p["bytes"])
             file.append(size)
-        
-        return '\n' + (8*' ') + ETree.tostring(file, encoding='unicode')
+
+        return "\n" + (8 * " ") + ETree.tostring(file, encoding="unicode")
 
     def indent(self, elem, level=0):
         # Only Python 3.9+ has a built-in indent function for element tree.
         # https://stackoverflow.com/a/33956544
-        i = "\n" + level*" "
+        i = "\n" + level * " "
         if len(elem):
             if not elem.text or not elem.text.strip():
                 elem.text = i + " "
             if not elem.tail or not elem.tail.strip():
                 elem.tail = i
             for elem in elem:
-                self.indent(elem, level+1)
+                self.indent(elem, level + 1)
             if not elem.tail or not elem.tail.strip():
                 elem.tail = i
         else:
             if level and (not elem.tail or not elem.tail.strip()):
-                elem.tail = i
\ No newline at end of file
+                elem.tail = i
diff --git a/asf_search/health/__init__.py b/asf_search/health/__init__.py
index db175e6f..d76bf3ea 100644
--- a/asf_search/health/__init__.py
+++ b/asf_search/health/__init__.py
@@ -1 +1 @@
-from .health import health
\ No newline at end of file
+from .health import health  # noqa: F401
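The health module below wraps a single GET against CMR's health endpoint; a quick sketch of calling it (assumes network access to the production host):

    import asf_search as asf

    # Returns the parsed JSON status document from the CMR /search/health path.
    status = asf.health()
    print(status)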
diff --git a/asf_search/health/health.py b/asf_search/health/health.py
index f4f77a93..5adfaaa9 100644
--- a/asf_search/health/health.py
+++ b/asf_search/health/health.py
@@ -4,14 +4,24 @@
 import asf_search.constants
 
+
 def health(host: str = None) -> Dict:
     """
     Checks basic connectivity to and health of the ASF SearchAPI.
 
-    :param host: SearchAPI host, defaults to Production SearchAPI. This option is intended for dev/test purposes.
-    :return: Current configuration and status of subsystems
+    Parameters
+    ----------
+    host:
+        SearchAPI host, defaults to Production SearchAPI.
+        This option is intended for dev/test purposes.
+
+    Returns
+    -------
+    Current configuration and status of subsystems as a dict
     """
     if host is None:
         host = asf_search.INTERNAL.CMR_HOST
 
-    return json.loads(requests.get(f'https://{host}{asf_search.INTERNAL.CMR_HEALTH_PATH}').text)
\ No newline at end of file
+    return json.loads(
+        requests.get(f"https://{host}{asf_search.INTERNAL.CMR_HEALTH_PATH}").text
+    )
diff --git a/asf_search/search/__init__.py b/asf_search/search/__init__.py
index 9f1344b2..133b1f09 100644
--- a/asf_search/search/__init__.py
+++ b/asf_search/search/__init__.py
@@ -1,8 +1,8 @@
-from .search import search
-from .granule_search import granule_search
-from .product_search import product_search
-from .geo_search import geo_search
-from .baseline_search import stack_from_id
-from .campaigns import campaigns
-from .search_count import search_count
-from .search_generator import search_generator
\ No newline at end of file
+from .search import search  # noqa: F401
+from .granule_search import granule_search  # noqa: F401
+from .product_search import product_search  # noqa: F401
+from .geo_search import geo_search  # noqa: F401
+from .baseline_search import stack_from_id  # noqa: F401
+from .campaigns import campaigns  # noqa: F401
+from .search_count import search_count  # noqa: F401
+from .search_generator import search_generator  # noqa: F401
diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py
index f3b46a6c..a8ce3c17 100644
--- a/asf_search/search/baseline_search.py
+++ b/asf_search/search/baseline_search.py
@@ -9,7 +9,7 @@
 from asf_search import ASFProduct
 from asf_search.constants import PLATFORM
 from asf_search.exceptions import ASFSearchError
-from copy import copy
+
 
 precalc_platforms = [
     PLATFORM.ALOS,
@@ -21,21 +21,31 @@
 
 
 def stack_from_product(
-        reference: ASFProduct,
-        opts: ASFSearchOptions = None,
-        ASFProductSubclass: Type[ASFProduct] = None
-    ) -> ASFSearchResults:
+    reference: ASFProduct,
+    opts: ASFSearchOptions = None,
+    ASFProductSubclass: Type[ASFProduct] = None,
+) -> ASFSearchResults:
     """
     Finds a baseline stack from a reference ASFProduct
 
-    :param reference: Reference scene to base the stack on, and from which to calculate perpendicular/temporal baselines
-    :param opts: An ASFSearchOptions object describing the search parameters to be used. Search parameters specified outside this object will override in event of a conflict.
-    :param ASFProductSubclass: An ASFProduct subclass constructor
-
-    :return: ASFSearchResults(dict) of search results
+    Parameters
+    ----------
+    reference:
+        Reference scene to base the stack on,
+        and from which to calculate perpendicular/temporal baselines
+    opts:
+        An ASFSearchOptions object describing the search parameters to be used.
+        Search parameters specified outside this object will override in event of a conflict.
+    ASFProductSubclass:
+        An ASFProduct subclass constructor.
+ + Returns + ------- + `asf_search.ASFSearchResults` + list of search results of subclass ASFProduct or of provided ASFProductSubclass """ - opts = (ASFSearchOptions() if opts is None else copy(opts)) + opts = ASFSearchOptions() if opts is None else copy(opts) opts.merge_args(**dict(reference.get_stack_opts())) @@ -47,54 +57,67 @@ def stack_from_product( _cast_results_to_subclass(stack, ASFProductSubclass) stack, warnings = get_baseline_from_stack(reference=reference, stack=stack) - stack.searchComplete = is_complete # preserve final outcome of earlier search() + stack.searchComplete = is_complete # preserve final outcome of earlier search() - stack.sort(key=lambda product: product.properties['temporalBaseline']) + stack.sort(key=lambda product: product.properties["temporalBaseline"]) for warning in warnings: - ASF_LOGGER.warning(f'{warning}') - + ASF_LOGGER.warning(f"{warning}") + return stack def stack_from_id( - reference_id: str, - opts: ASFSearchOptions = None, - useSubclass: Type[ASFProduct] = None + reference_id: str, + opts: ASFSearchOptions = None, + useSubclass: Type[ASFProduct] = None, ) -> ASFSearchResults: """ Finds a baseline stack from a reference product ID - :param reference_id: Reference product to base the stack from, and from which to calculate perpendicular/temporal baselines - :param opts: An ASFSearchOptions object describing the search parameters to be used. Search parameters specified outside this object will override in event of a conflict. - :param ASFProductSubclass: An ASFProduct subclass constructor. - - :return: ASFSearchResults(list) of search results + Parameters + ---------- + reference_id: + Reference product to base the stack from, + and from which to calculate perpendicular/temporal baselines + opts: + An ASFSearchOptions object describing the search parameters to be used. + Search parameters specified outside this object will override in event of a conflict. + ASFProductSubclass: + An ASFProduct subclass constructor. + + Returns + ------- + `asf_search.ASFSearchResults` + list of search results of subclass ASFProduct or of provided ASFProductSubclass """ - opts = (ASFSearchOptions() if opts is None else copy(opts)) - + opts = ASFSearchOptions() if opts is None else copy(opts) reference_results = product_search(product_list=reference_id, opts=opts) - + reference_results.raise_if_incomplete() - + if len(reference_results) <= 0: - raise ASFSearchError(f'Reference product not found: {reference_id}') + raise ASFSearchError(f"Reference product not found: {reference_id}") reference = reference_results[0] - + if useSubclass is not None: reference = _cast_to_subclass(reference, useSubclass) - + return reference.stack(opts=opts, useSubclass=useSubclass) -def _cast_results_to_subclass(stack: ASFSearchResults, ASFProductSubclass: Type[ASFProduct]): + +def _cast_results_to_subclass( + stack: ASFSearchResults, ASFProductSubclass: Type[ASFProduct] +): """ Converts results from default ASFProduct subclasses to custom ones """ for idx, product in enumerate(stack): stack[idx] = _cast_to_subclass(product, ASFProductSubclass) + def _cast_to_subclass(product: ASFProduct, subclass: Type[ASFProduct]) -> ASFProduct: """ Casts this ASFProduct object as a new object of return type subclass. @@ -105,16 +128,16 @@ class MyCustomClass(ASFProduct): _base_properties = { 'some_unique_property': {'path': ['AdditionalAttributes', 'UNIQUE_PROPERTY', ...]} } - + ... 
-    
+
     @staticmethod
     def get_property_paths() -> dict:
         return {
             **ASFProduct.get_property_paths(),
             **MyCustomClass._base_properties
         }
-    
+
     # subclass as constructor
     customReference = reference.cast_to_subclass(MyCustomClass)
     print(customReference.properties['some_unique_property'])
@@ -126,8 +149,12 @@ def get_property_paths() -> dict:
     try:
         if isinstance(subclass, type(ASFProduct)):
-            return subclass(args={'umm': product.umm, 'meta': product.meta}, session=product.session)
+            return subclass(
+                args={"umm": product.umm, "meta": product.meta}, session=product.session
+            )
     except Exception as e:
-        raise ValueError(f"Unable to use provided subclass {type(subclass)}, \nError Message: {e}")
-
-    raise ValueError(f"Expected ASFProduct subclass constructor, got {type(subclass)}")
\ No newline at end of file
+        raise ValueError(
+            f"Unable to use provided subclass {type(subclass)}, \nError Message: {e}"
+        )
+
+    raise ValueError(f"Expected ASFProduct subclass constructor, got {type(subclass)}")
diff --git a/asf_search/search/campaigns.py b/asf_search/search/campaigns.py
index 71afc023..d1541396 100644
--- a/asf_search/search/campaigns.py
+++ b/asf_search/search/campaigns.py
@@ -4,31 +4,28 @@
 def campaigns(platform: str) -> List[str]:
     """
-    Returns a list of campaign names for the given platform, 
+    Returns a list of campaign names for the given platform,
     each name being usable as a campaign for asf_search.search() and asf_search.geo_search()
 
-    :param platform: The name of the platform to gather campaign names for. 
+    :param platform: The name of the platform to gather campaign names for.
     Platforms currently supported include UAVSAR, AIRSAR, and SENTINEL-1 INTERFEROGRAM (BETA)
-    
+
     :return: A list of campaign names for the given platform
     """
-    data = {
-        'include_facets': 'true',
-        'provider': 'ASF'
-    }
-
-    if platform != None:
-        if platform == 'UAVSAR':
-            data['platform[]'] = 'G-III'
-            data['instrument[]'] = 'UAVSAR'
-        elif platform == 'AIRSAR':
-            data['platform[]'] = 'DC-8'
-            data['instrument[]'] = 'AIRSAR'
-        elif platform == 'SENTINEL-1 INTERFEROGRAM (BETA)':
-            data['platform[]'] = 'SENTINEL-1A'
+    data = {"include_facets": "true", "provider": "ASF"}
+
+    if platform is not None:
+        if platform == "UAVSAR":
+            data["platform[]"] = "G-III"
+            data["instrument[]"] = "UAVSAR"
+        elif platform == "AIRSAR":
+            data["platform[]"] = "DC-8"
+            data["instrument[]"] = "AIRSAR"
+        elif platform == "SENTINEL-1 INTERFEROGRAM (BETA)":
+            data["platform[]"] = "SENTINEL-1A"
         else:
-            data['platform[]'] = platform
-    
+            data["platform[]"] = platform
+
     missions = get_campaigns(data)
     mission_names = _get_project_names(missions)
 
@@ -37,7 +34,7 @@ def campaigns(platform: str) -> List[str]:
 
 def _get_project_names(data: Union[Dict, List]) -> List[str]:
     """
-    Recursively searches for campaign names 
+    Recursively searches for campaign names
     under "Projects" key in CMR umm_json response
 
     :param data: CMR umm_json response
@@ -53,5 +50,5 @@
     elif isinstance(data, List):
         for item in data:
             output.extend(_get_project_names(item))
-    
+
     return output
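As a brief sketch of the campaigns helper reformatted above, using one of the platform names it special-cases:

    import asf_search as asf

    # Returns campaign names usable as the `campaign` keyword in search() and geo_search().
    names = asf.campaigns("UAVSAR")
    print(names[:5])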
automatically to ASF"""
     from asf_search import REPORT_ERRORS

     if not REPORT_ERRORS:
-        logging.warning("Automatic search error reporting is turned off, search errors will NOT be reported to ASF.\
-            \nTo enable automatic error reporting, set asf_search.REPORT_ERRORS to True\
-            \nIf you have any questions email uso@asf.alaska.edu")
+        logging.warning(
+            'Automatic search error reporting is turned off, '
+            'search errors will NOT be reported to ASF.'
+            '\nTo enable automatic error reporting, set asf_search.REPORT_ERRORS to True'
+            '\nIf you have any questions email uso@asf.alaska.edu'
+        )
         return

     user_agent = search_options.session.headers.get("User-Agent")
-    search_options_list = '\n'.join([f"\t{option}: {key}" for option, key in dict(search_options).items()])
-    message=f"Error Message: {str(message)}\nUser Agent: {user_agent} \
-        \nSearch Options: {{\n{search_options_list}\n}}"
+    search_options_list = "\n".join(
+        [f"\t{option}: {key}" for option, key in dict(search_options).items()]
+    )
+    message = (
+        f"Error Message: {str(message)}\nUser Agent: {user_agent}"
+        f"\nSearch Options: {{\n{search_options_list}\n}}"
+    )

-    response = requests.post(f'https://{INTERNAL.ERROR_REPORTING_ENDPOINT}', data={'Message': "This error message and info was automatically generated:\n\n" + message})
+    response = requests.post(
+        f"https://{INTERNAL.ERROR_REPORTING_ENDPOINT}",
+        data={
+            "Message": f"This error message and info was automatically generated:\n\n{message}"
+        },
+    )

     try:
         response.raise_for_status()
     except requests.exceptions.HTTPError:
-        logging.error(f"asf-search failed to automatically report an error, if you have any questions email uso@asf.alaska.edu\
-            \nError Text: HTTP {response.status_code}: {response.json()['errors']}")
+        logging.error(
+            'asf-search failed to automatically report an error, '
+            'if you have any questions email uso@asf.alaska.edu'
+            f"\nError Text: HTTP {response.status_code}: {response.json()['errors']}")
         return

     if response.status_code == 200:
-        logging.error("The asf-search module ecountered an error with CMR, and the following message was automatically reported to ASF:\n\n" +
-                      "\"\n" +
-                      message +
-                      "\n\"" +
-                      "If you have any questions email uso@asf.alaska.edu")
+        logging.error(
+            (
+                'The asf-search module encountered an error with CMR, '
+                'and the following message was automatically reported to ASF:'
+                f'\n\n"\n{message}\n"'
+                'If you have any questions email uso@asf.alaska.edu'
+            )
+        )
diff --git a/asf_search/search/geo_search.py b/asf_search/search/geo_search.py
index 491901fd..6172e10c 100644
--- a/asf_search/search/geo_search.py
+++ b/asf_search/search/geo_search.py
@@ -8,76 +8,162 @@


 def geo_search(
-    absoluteOrbit: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None,
-    asfFrame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None,
-    beamMode: Union[str, Sequence[str]] = None,
-    beamSwath: Union[str, Sequence[str]] = None,
-    campaign: Union[str, Sequence[str]] = None,
-    maxDoppler: float = None,
-    minDoppler: float = None,
-    end: Union[datetime.datetime, str] = None,
-    maxFaradayRotation: float = None,
-    minFaradayRotation: float = None,
-    flightDirection: str = None,
-    flightLine: str = None,
-    frame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None,
-    granule_list: Union[str, Sequence[str]] = None,
-    groupID: Union[str, Sequence[str]] = None,
-    insarStackId: str = None,
-    instrument: Union[str, Sequence[str]] = None,
-    intersectsWith: str = None,
-    lookDirection: Union[str, Sequence[str]] = None,
-    offNadirAngle: 
Union[float, Tuple[float, float], Sequence[Union[float, Tuple[float, float]]]] = None, - platform: Union[str, Sequence[str]] = None, - polarization: Union[str, Sequence[str]] = None, - processingDate: Union[datetime.datetime, str] = None, - processingLevel: Union[str, Sequence[str]] = None, - product_list: Union[str, Sequence[str]] = None, - relativeOrbit: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - season: Tuple[int, int] = None, - start: Union[datetime.datetime, str] = None, - absoluteBurstID: Union[int, Sequence[int]] = None, - relativeBurstID: Union[int, Sequence[int]] = None, - fullBurstID: Union[str, Sequence[str]] = None, - collections: Union[str, Sequence[str]] = None, - temporalBaselineDays: Union[str, Sequence[str]] = None, - operaBurstID: Union[str, Sequence[str]] = None, - dataset: Union[str, Sequence[str]] = None, - shortName: Union[str, Sequence[str]] = None, - cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None, - maxResults: int = None, - opts: ASFSearchOptions = None, + absoluteOrbit: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + asfFrame: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + beamMode: Union[str, Sequence[str]] = None, + beamSwath: Union[str, Sequence[str]] = None, + campaign: Union[str, Sequence[str]] = None, + maxDoppler: float = None, + minDoppler: float = None, + end: Union[datetime.datetime, str] = None, + maxFaradayRotation: float = None, + minFaradayRotation: float = None, + flightDirection: str = None, + flightLine: str = None, + frame: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + granule_list: Union[str, Sequence[str]] = None, + groupID: Union[str, Sequence[str]] = None, + insarStackId: str = None, + instrument: Union[str, Sequence[str]] = None, + intersectsWith: str = None, + lookDirection: Union[str, Sequence[str]] = None, + offNadirAngle: Union[ + float, Tuple[float, float], Sequence[Union[float, Tuple[float, float]]] + ] = None, + platform: Union[str, Sequence[str]] = None, + polarization: Union[str, Sequence[str]] = None, + processingDate: Union[datetime.datetime, str] = None, + processingLevel: Union[str, Sequence[str]] = None, + product_list: Union[str, Sequence[str]] = None, + relativeOrbit: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + season: Tuple[int, int] = None, + start: Union[datetime.datetime, str] = None, + absoluteBurstID: Union[int, Sequence[int]] = None, + relativeBurstID: Union[int, Sequence[int]] = None, + fullBurstID: Union[str, Sequence[str]] = None, + collections: Union[str, Sequence[str]] = None, + temporalBaselineDays: Union[str, Sequence[str]] = None, + operaBurstID: Union[str, Sequence[str]] = None, + dataset: Union[str, Sequence[str]] = None, + shortName: Union[str, Sequence[str]] = None, + cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None, + maxResults: int = None, + opts: ASFSearchOptions = None, ) -> ASFSearchResults: """ - Performs a geographic search using the ASF SearchAPI + Performs a geographic search against the Central Metadata Repository (CMR), + returning all results in a single list. - :param absoluteOrbit: For ALOS, ERS-1, ERS-2, JERS-1, and RADARSAT-1, Sentinel-1A, Sentinel-1B this value corresponds to the orbit count within the orbit cycle. For UAVSAR it is the Flight ID. 
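As a quick usage sketch of the geo_search keywords documented here — the platform, WKT AOI, and dates below are illustrative placeholders, not values taken from this patch:

    import asf_search as asf

    # Placeholder search values; any valid WKT AOI and date range works
    results = asf.geo_search(
        platform='SENTINEL-1A',
        intersectsWith='POINT(-149.89 61.21)',
        start='2021-01-01',
        end='2021-02-01',
        maxResults=100,
    )
    print(f'{len(results)} products found')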
- :param asfFrame: This is primarily an ASF / JAXA frame reference. However, some platforms use other conventions. See ‘frame’ for ESA-centric frame searches. - :param beamMode: The beam mode used to acquire the data. - :param beamSwath: Encompasses a look angle and beam mode. - :param campaign: For UAVSAR and AIRSAR data collections only. Search by general location, site description, or data grouping as supplied by flight agency or project. - :param end: End date of data acquisition. Supports timestamps as well as natural language such as "3 weeks ago" - :param flightDirection: Satellite orbit direction during data acquisition - :param frame: ESA-referenced frames are offered to give users a universal framing convention. Each ESA frame has a corresponding ASF frame assigned. See also: asfframe - :param instrument: The instrument used to acquire the data. See also: platform - :param intersectsWith: Search by polygon, linestring, or point defined in 2D Well-Known Text (WKT) - :param lookDirection: Left or right look direction during data acquisition - :param platform: Remote sensing platform that acquired the data. Platforms that work together, such as Sentinel-1A/1B and ERS-1/2 have multi-platform aliases available. See also: instrument - :param polarization: A property of SAR electromagnetic waves that can be used to extract meaningful information about surface properties of the earth. - :param processingDate: Used to find data that has been processed at ASF since a given time and date. Supports timestamps as well as natural language such as "3 weeks ago" - :param processingLevel: Level to which the data has been processed - :param relativeOrbit: Path or track of satellite during data acquisition. For UAVSAR it is the Line ID. - :param start: Start date of data acquisition. Supports timestamps as well as natural language such as "3 weeks ago" - :param maxResults: The maximum number of results to be returned by the search - :param opts: An ASFSearchOptions object describing the search parameters to be used. Search parameters specified outside this object will override in event of a conflict. + Parameters + ---------- + absoluteOrbit: + For ALOS, ERS-1, ERS-2, JERS-1, and RADARSAT-1, Sentinel-1A, Sentinel-1B + this value corresponds to the orbit count within the orbit cycle. + For UAVSAR it is the Flight ID. + asfFrame: + This is primarily an ASF / JAXA frame reference. However, + some platforms use other conventions. See ‘frame’ for ESA-centric frame searches. + beamMode: + The beam mode used to acquire the data. + beamSwath: + Encompasses a look angle and beam mode. + campaign: + For UAVSAR and AIRSAR data collections only. Search by general location, + site description, or data grouping as supplied by flight agency or project. + maxDoppler: + Doppler provides an indication of how much the look direction deviates + from the ideal perpendicular flight direction acquisition. + minDoppler: + Doppler provides an indication of how much the look direction deviates + from the ideal perpendicular flight direction acquisition. + end: + End date of data acquisition. Supports timestamps + as well as natural language such as "3 weeks ago" + maxFaradayRotation: + Rotation of the polarization plane of + the radar signal impacts imagery, as HH and HV signals become mixed. + minFaradayRotation: + Rotation of the polarization plane of + the radar signal impacts imagery, as HH and HV signals become mixed. 
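These min/max filters validate as single floats; one hedged sketch of routing them through an ASFSearchOptions object instead of keyword arguments (the threshold values are arbitrary examples, not recommendations):

    import asf_search as asf

    # Arbitrary example thresholds, purely for illustration
    opts = asf.ASFSearchOptions(
        platform='ALOS',
        maxFaradayRotation=3.0,
        maxResults=25,
    )
    results = asf.search(opts=opts)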
+ flightDirection: + Satellite orbit direction during data acquisition + flightLine: + Specify a flightline for UAVSAR or AIRSAR. + frame: + ESA-referenced frames are offered to give users a universal framing convention. + Each ESA frame has a corresponding ASF frame assigned. See also: asfframe + granule_list: + List of specific granules. + Search results may include several products per granule name. + groupID: + Identifier used to find products considered to + be of the same scene but having different granule names + insarStackId: + Identifier used to find products of the same InSAR stack + instrument: + The instrument used to acquire the data. See also: platform + intersectsWith: + Search by polygon, linestring, + or point defined in 2D Well-Known Text (WKT) + lookDirection: + Left or right look direction during data acquisition + offNadirAngle: + Off-nadir angles for ALOS PALSAR + platform: + Remote sensing platform that acquired the data. + Platforms that work together, such as Sentinel-1A/1B and ERS-1/2 + have multi-platform aliases available. See also: instrument + polarization: + A property of SAR electromagnetic waves + that can be used to extract meaningful information about surface properties of the earth. + processingDate: + Used to find data that has been processed at ASF since a given + time and date. Supports timestamps as well as natural language such as "3 weeks ago" + processingLevel: + Level to which the data has been processed + product_list: + List of specific products. + Guaranteed to be at most one product per product name. + relativeOrbit: + Path or track of satellite during data acquisition. + For UAVSAR it is the Line ID. + season: + Start and end day of year for desired seasonal range. + This option is used in conjunction with start/end to specify a seasonal range + within an overall date range. + start: + Start date of data acquisition. + Supports timestamps as well as natural language such as "3 weeks ago" + collections: + List of collections (concept-ids) to limit search to + temporalBaselineDays: + List of temporal baselines, + used for Sentinel-1 Interferogram (BETA) + maxResults: + The maximum number of results to be returned by the search + opts: + An ASFSearchOptions object describing the search parameters to be used. + Search parameters specified outside this object will override in event of a conflict. - :return: ASFSearchResults(list) of search results + Returns + ------- + `asf_search.ASFSearchResults` (list of search results of subclass ASFProduct) """ kwargs = locals() - data = dict((k, v) for k, v in kwargs.items() if k not in ['host', 'opts'] and v is not None) + data = dict( + (k, v) for k, v in kwargs.items() if k not in ["host", "opts"] and v is not None + ) - opts = (ASFSearchOptions() if opts is None else copy(opts)) + opts = ASFSearchOptions() if opts is None else copy(opts) opts.merge_args(**data) return search(opts=opts) diff --git a/asf_search/search/granule_search.py b/asf_search/search/granule_search.py index 1b17d18a..5122d0c7 100644 --- a/asf_search/search/granule_search.py +++ b/asf_search/search/granule_search.py @@ -7,20 +7,27 @@ def granule_search( - granule_list: Sequence[str], - opts: ASFSearchOptions = None + granule_list: Sequence[str], opts: ASFSearchOptions = None ) -> ASFSearchResults: """ Performs a granule name search using the ASF SearchAPI - :param granule_list: List of specific granules. Results may include several products per granule name. 
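A minimal granule_search sketch (the granule name is a made-up placeholder; real names typically come from earlier searches or Vertex):

    import asf_search as asf

    # Placeholder granule name; several products may come back per granule
    results = asf.granule_search(granule_list=['S1A_IW_SLC__EXAMPLE_GRANULE'])
    for product in results:
        print(product.properties['sceneName'])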
-    :param opts: An ASFSearchOptions object describing the search parameters to be used. Search parameters specified outside this object will override in event of a conflict.
+    Parameters
+    ----------
+    granule_list:
+        List of specific granules.
+        Search results may include several products per granule name.
+    opts:
+        An ASFSearchOptions object describing the search parameters to be used.
+        Search parameters specified outside this object will override in event of a conflict.

-    :return: ASFSearchResults(list) of search results
+    Returns
+    -------
+    `asf_search.ASFSearchResults` (list of search results of subclass ASFProduct)
     """
-    opts = (ASFSearchOptions() if opts is None else copy(opts))
-
+    opts = ASFSearchOptions() if opts is None else copy(opts)
+
     opts.merge_args(granule_list=granule_list)
-
+
     return search(opts=opts)
diff --git a/asf_search/search/product_search.py b/asf_search/search/product_search.py
index e469938e..c60ebd69 100644
--- a/asf_search/search/product_search.py
+++ b/asf_search/search/product_search.py
@@ -7,20 +7,27 @@


 def product_search(
-    product_list: Sequence[str],
-    opts: ASFSearchOptions = None
+    product_list: Sequence[str], opts: ASFSearchOptions = None
 ) -> ASFSearchResults:
     """
     Performs a product ID search using the ASF SearchAPI

-    :param product_list: List of specific products. Results guaranteed to be at most one product per product name.
-    :param opts: An ASFSearchOptions object describing the search parameters to be used. Search parameters specified outside this object will override in event of a conflict.
+    Parameters
+    ----------
+    product_list:
+        List of specific products.
+        Guaranteed to be at most one product per product name.
+    opts:
+        An ASFSearchOptions object describing the search parameters to be used.
+        Search parameters specified outside this object will override in event of a conflict.
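The matching product_search sketch (the product ID is a made-up placeholder):

    import asf_search as asf

    # Placeholder product ID; at most one result is returned per ID
    results = asf.product_search(product_list=['EXAMPLE-PRODUCT-ID'])
    print(len(results))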
- :return: ASFSearchResults(list) of search results + Returns + ------- + `asf_search.ASFSearchResults` (list of search results of subclass ASFProduct) """ - opts = (ASFSearchOptions() if opts is None else copy(opts)) + opts = ASFSearchOptions() if opts is None else copy(opts) opts.merge_args(product_list=product_list) - + return search(opts=opts) diff --git a/asf_search/search/search.py b/asf_search/search/search.py index 37360736..eecb5c28 100644 --- a/asf_search/search/search.py +++ b/asf_search/search/search.py @@ -6,102 +6,183 @@ from asf_search.ASFSearchOptions import ASFSearchOptions from asf_search.search.search_generator import search_generator + def search( - absoluteOrbit: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - asfFrame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - beamMode: Union[str, Sequence[str]] = None, - beamSwath: Union[str, Sequence[str]] = None, - campaign: Union[str, Sequence[str]] = None, - maxDoppler: float = None, - minDoppler: float = None, - end: Union[datetime.datetime, str] = None, - maxFaradayRotation: float = None, - minFaradayRotation: float = None, - flightDirection: str = None, - flightLine: str = None, - frame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - granule_list: Union[str, Sequence[str]] = None, - groupID: Union[str, Sequence[str]] = None, - insarStackId: str = None, - instrument: Union[str, Sequence[str]] = None, - intersectsWith: str = None, - lookDirection: Union[str, Sequence[str]] = None, - offNadirAngle: Union[float, Tuple[float, float], Sequence[Union[float, Tuple[float, float]]]] = None, - platform: Union[str, Sequence[str]] = None, - polarization: Union[str, Sequence[str]] = None, - processingDate: Union[datetime.datetime, str] = None, - processingLevel: Union[str, Sequence[str]] = None, - product_list: Union[str, Sequence[str]] = None, - relativeOrbit: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - season: Tuple[int, int] = None, - start: Union[datetime.datetime, str] = None, - absoluteBurstID: Union[int, Sequence[int]] = None, - relativeBurstID: Union[int, Sequence[int]] = None, - fullBurstID: Union[str, Sequence[str]] = None, - collections: Union[str, Sequence[str]] = None, - temporalBaselineDays: Union[str, Sequence[str]] = None, - operaBurstID: Union[str, Sequence[str]] = None, - dataset: Union[str, Sequence[str]] = None, - shortName: Union[str, Sequence[str]] = None, - cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None, - maxResults: int = None, - opts: ASFSearchOptions = None, + absoluteOrbit: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + asfFrame: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + beamMode: Union[str, Sequence[str]] = None, + beamSwath: Union[str, Sequence[str]] = None, + campaign: Union[str, Sequence[str]] = None, + maxDoppler: float = None, + minDoppler: float = None, + end: Union[datetime.datetime, str] = None, + maxFaradayRotation: float = None, + minFaradayRotation: float = None, + flightDirection: str = None, + flightLine: str = None, + frame: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + granule_list: Union[str, Sequence[str]] = None, + groupID: Union[str, Sequence[str]] = None, + insarStackId: str = None, + instrument: Union[str, Sequence[str]] = None, + 
intersectsWith: str = None, + lookDirection: Union[str, Sequence[str]] = None, + offNadirAngle: Union[ + float, Tuple[float, float], Sequence[Union[float, Tuple[float, float]]] + ] = None, + platform: Union[str, Sequence[str]] = None, + polarization: Union[str, Sequence[str]] = None, + processingDate: Union[datetime.datetime, str] = None, + processingLevel: Union[str, Sequence[str]] = None, + product_list: Union[str, Sequence[str]] = None, + relativeOrbit: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + season: Tuple[int, int] = None, + start: Union[datetime.datetime, str] = None, + absoluteBurstID: Union[int, Sequence[int]] = None, + relativeBurstID: Union[int, Sequence[int]] = None, + fullBurstID: Union[str, Sequence[str]] = None, + collections: Union[str, Sequence[str]] = None, + temporalBaselineDays: Union[str, Sequence[str]] = None, + operaBurstID: Union[str, Sequence[str]] = None, + dataset: Union[str, Sequence[str]] = None, + shortName: Union[str, Sequence[str]] = None, + cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None, + maxResults: int = None, + opts: ASFSearchOptions = None, ) -> ASFSearchResults: """ - Performs a generic search using the ASF SearchAPI. Accepts a number of search parameters, and/or an ASFSearchOptions object. If an ASFSearchOptions object is provided as well as other specific parameters, the two sets of options will be merged, preferring the specific keyword arguments. + Performs a generic search against the Central Metadata Repository (CMR), + returning all results in a single list. + (For accessing results page by page see `asf_search.search_generator()`) + + Accepts a number of search parameters, and/or an ASFSearchOptions object. + If an ASFSearchOptions object is provided as well as other specific parameters, + the two sets of options will be merged, preferring the specific keyword arguments. - :param absoluteOrbit: For ALOS, ERS-1, ERS-2, JERS-1, and RADARSAT-1, Sentinel-1A, Sentinel-1B this value corresponds to the orbit count within the orbit cycle. For UAVSAR it is the Flight ID. - :param asfFrame: This is primarily an ASF / JAXA frame reference. However, some platforms use other conventions. See ‘frame’ for ESA-centric frame searches. - :param beamMode: The beam mode used to acquire the data. - :param beamSwath: Encompasses a look angle and beam mode. - :param campaign: For UAVSAR and AIRSAR data collections only. Search by general location, site description, or data grouping as supplied by flight agency or project. - :param maxDoppler: Doppler provides an indication of how much the look direction deviates from the ideal perpendicular flight direction acquisition. - :param minDoppler: Doppler provides an indication of how much the look direction deviates from the ideal perpendicular flight direction acquisition. - :param end: End date of data acquisition. Supports timestamps as well as natural language such as "3 weeks ago" - :param maxFaradayRotation: Rotation of the polarization plane of the radar signal impacts imagery, as HH and HV signals become mixed. - :param minFaradayRotation: Rotation of the polarization plane of the radar signal impacts imagery, as HH and HV signals become mixed. - :param flightDirection: Satellite orbit direction during data acquisition - :param flightLine: Specify a flightline for UAVSAR or AIRSAR. - :param frame: ESA-referenced frames are offered to give users a universal framing convention. Each ESA frame has a corresponding ASF frame assigned. 
See also: asfframe - :param granule_list: List of specific granules. Search results may include several products per granule name. - :param groupID: Identifier used to find products considered to be of the same scene but having different granule names - :param insarStackId: Identifier used to find products of the same InSAR stack - :param instrument: The instrument used to acquire the data. See also: platform - :param intersectsWith: Search by polygon, linestring, or point defined in 2D Well-Known Text (WKT) - :param lookDirection: Left or right look direction during data acquisition - :param offNadirAngle: Off-nadir angles for ALOS PALSAR - :param platform: Remote sensing platform that acquired the data. Platforms that work together, such as Sentinel-1A/1B and ERS-1/2 have multi-platform aliases available. See also: instrument - :param polarization: A property of SAR electromagnetic waves that can be used to extract meaningful information about surface properties of the earth. - :param processingDate: Used to find data that has been processed at ASF since a given time and date. Supports timestamps as well as natural language such as "3 weeks ago" - :param processingLevel: Level to which the data has been processed - :param product_list: List of specific products. Guaranteed to be at most one product per product name. - :param relativeOrbit: Path or track of satellite during data acquisition. For UAVSAR it is the Line ID. - :param season: Start and end day of year for desired seasonal range. This option is used in conjunction with start/end to specify a seasonal range within an overall date range. - :param start: Start date of data acquisition. Supports timestamps as well as natural language such as "3 weeks ago" - :param collections: List of collections (concept-ids) to limit search to - :param temporalBaselineDays: List of temporal baselines, used for Sentinel-1 Interferogram (BETA) - :param maxResults: The maximum number of results to be returned by the search - :param opts: An ASFSearchOptions object describing the search parameters to be used. Search parameters specified outside this object will override in event of a conflict. + Parameters + ---------- + absoluteOrbit: + For ALOS, ERS-1, ERS-2, JERS-1, and RADARSAT-1, Sentinel-1A, Sentinel-1B + this value corresponds to the orbit count within the orbit cycle. + For UAVSAR it is the Flight ID. + asfFrame: + This is primarily an ASF / JAXA frame reference. However, + some platforms use other conventions. See ‘frame’ for ESA-centric frame searches. + beamMode: + The beam mode used to acquire the data. + beamSwath: + Encompasses a look angle and beam mode. + campaign: + For UAVSAR and AIRSAR data collections only. Search by general location, + site description, or data grouping as supplied by flight agency or project. + maxDoppler: + Doppler provides an indication of how much the look direction deviates + from the ideal perpendicular flight direction acquisition. + minDoppler: + Doppler provides an indication of how much the look direction deviates + from the ideal perpendicular flight direction acquisition. + end: + End date of data acquisition. Supports timestamps + as well as natural language such as "3 weeks ago" + maxFaradayRotation: + Rotation of the polarization plane of + the radar signal impacts imagery, as HH and HV signals become mixed. + minFaradayRotation: + Rotation of the polarization plane of + the radar signal impacts imagery, as HH and HV signals become mixed. 
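A short sketch of the keyword/opts merge behavior this docstring describes (values are illustrative):

    import asf_search as asf

    opts = asf.ASFSearchOptions(platform='SENTINEL-1A', maxResults=10)
    # Specific keyword arguments take precedence over the opts object,
    # so this search is capped at 5 results rather than 10:
    results = asf.search(opts=opts, maxResults=5)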
+    flightDirection:
+        Satellite orbit direction during data acquisition
+    flightLine:
+        Specify a flightline for UAVSAR or AIRSAR.
+    frame:
+        ESA-referenced frames are offered to give users a universal framing convention.
+        Each ESA frame has a corresponding ASF frame assigned. See also: asfframe
+    granule_list:
+        List of specific granules.
+        Search results may include several products per granule name.
+    groupID:
+        Identifier used to find products considered to
+        be of the same scene but having different granule names
+    insarStackId:
+        Identifier used to find products of the same InSAR stack
+    instrument:
+        The instrument used to acquire the data. See also: platform
+    intersectsWith:
+        Search by polygon, linestring,
+        or point defined in 2D Well-Known Text (WKT)
+    lookDirection:
+        Left or right look direction during data acquisition
+    offNadirAngle:
+        Off-nadir angles for ALOS PALSAR
+    platform:
+        Remote sensing platform that acquired the data.
+        Platforms that work together, such as Sentinel-1A/1B and ERS-1/2
+        have multi-platform aliases available. See also: instrument
+    polarization:
+        A property of SAR electromagnetic waves
+        that can be used to extract meaningful information about surface properties of the earth.
+    processingDate:
+        Used to find data that has been processed at ASF since a given
+        time and date. Supports timestamps as well as natural language such as "3 weeks ago"
+    processingLevel:
+        Level to which the data has been processed
+    product_list:
+        List of specific products.
+        Guaranteed to be at most one product per product name.
+    relativeOrbit:
+        Path or track of satellite during data acquisition.
+        For UAVSAR it is the Line ID.
+    season:
+        Start and end day of year for desired seasonal range.
+        This option is used in conjunction with start/end to specify a seasonal range
+        within an overall date range.
+    start:
+        Start date of data acquisition.
+        Supports timestamps as well as natural language such as "3 weeks ago"
+    collections:
+        List of collections (concept-ids) to limit search to
+    temporalBaselineDays:
+        List of temporal baselines,
+        used for Sentinel-1 Interferogram (BETA)
+    maxResults:
+        The maximum number of results to be returned by the search
+    opts:
+        An ASFSearchOptions object describing the search parameters to be used.
+        Search parameters specified outside this object will override in event of a conflict.

-    :return: ASFSearchResults(list) of search results
+    Returns
+    -------
+    `asf_search.ASFSearchResults` (list of search results of subclass ASFProduct)
     """
     kwargs = locals()
-    data = dict((k, v) for k, v in kwargs.items() if k not in ['host', 'opts'] and v is not None)
+    data = dict(
+        (k, v) for k, v in kwargs.items() if k not in ["host", "opts"] and v is not None
+    )

-    opts = (ASFSearchOptions() if opts is None else copy(opts))
+    opts = ASFSearchOptions() if opts is None else copy(opts)
     opts.merge_args(**data)

     results = ASFSearchResults([])
-
+
     # The last page will be marked as complete if results successful
     for page in search_generator(opts=opts):
         results.extend(page)
         results.searchComplete = page.searchComplete
         results.searchOptions = page.searchOptions
-
+
     try:
         results.sort(key=lambda p: p.get_sort_keys(), reverse=True)
     except TypeError as exc:
-        ASF_LOGGER.warning(f"Failed to sort final results, leaving results unsorted. Reason: {exc}")
-
+        ASF_LOGGER.warning(
+            f"Failed to sort final results, leaving results unsorted. 
Reason: {exc}" + ) + return results diff --git a/asf_search/search/search_count.py b/asf_search/search/search_count.py index 8de55742..c802bf79 100644 --- a/asf_search/search/search_count.py +++ b/asf_search/search/search_count.py @@ -8,50 +8,60 @@ from asf_search import INTERNAL -def search_count( - absoluteOrbit: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - asfFrame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - beamMode: Union[str, Sequence[str]] = None, - beamSwath: Union[str, Sequence[str]] = None, - campaign: Union[str, Sequence[str]] = None, - maxDoppler: float = None, - minDoppler: float = None, - end: Union[datetime.datetime, str] = None, - maxFaradayRotation: float = None, - minFaradayRotation: float = None, - flightDirection: str = None, - flightLine: str = None, - frame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - granule_list: Union[str, Sequence[str]] = None, - groupID: Union[str, Sequence[str]] = None, - insarStackId: str = None, - instrument: Union[str, Sequence[str]] = None, - intersectsWith: str = None, - lookDirection: Union[str, Sequence[str]] = None, - offNadirAngle: Union[float, Tuple[float, float], Sequence[Union[float, Tuple[float, float]]]] = None, - platform: Union[str, Sequence[str]] = None, - polarization: Union[str, Sequence[str]] = None, - processingDate: Union[datetime.datetime, str] = None, - processingLevel: Union[str, Sequence[str]] = None, - product_list: Union[str, Sequence[str]] = None, - relativeOrbit: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - season: Tuple[int, int] = None, - start: Union[datetime.datetime, str] = None, - absoluteBurstID: Union[int, Sequence[int]] = None, - relativeBurstID: Union[int, Sequence[int]] = None, - fullBurstID: Union[str, Sequence[str]] = None, - collections: Union[str, Sequence[str]] = None, - temporalBaselineDays: Union[str, Sequence[str]] = None, - operaBurstID: Union[str, Sequence[str]] = None, - dataset: Union[str, Sequence[str]] = None, - shortName: Union[str, Sequence[str]] = None, - cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None, - maxResults: int = None, - opts: ASFSearchOptions = None, +def search_count( + absoluteOrbit: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + asfFrame: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + beamMode: Union[str, Sequence[str]] = None, + beamSwath: Union[str, Sequence[str]] = None, + campaign: Union[str, Sequence[str]] = None, + maxDoppler: float = None, + minDoppler: float = None, + end: Union[datetime.datetime, str] = None, + maxFaradayRotation: float = None, + minFaradayRotation: float = None, + flightDirection: str = None, + flightLine: str = None, + frame: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + granule_list: Union[str, Sequence[str]] = None, + groupID: Union[str, Sequence[str]] = None, + insarStackId: str = None, + instrument: Union[str, Sequence[str]] = None, + intersectsWith: str = None, + lookDirection: Union[str, Sequence[str]] = None, + offNadirAngle: Union[ + float, Tuple[float, float], Sequence[Union[float, Tuple[float, float]]] + ] = None, + platform: Union[str, Sequence[str]] = None, + polarization: Union[str, Sequence[str]] = None, + processingDate: Union[datetime.datetime, str] = None, + processingLevel: 
Union[str, Sequence[str]] = None, + product_list: Union[str, Sequence[str]] = None, + relativeOrbit: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + season: Tuple[int, int] = None, + start: Union[datetime.datetime, str] = None, + absoluteBurstID: Union[int, Sequence[int]] = None, + relativeBurstID: Union[int, Sequence[int]] = None, + fullBurstID: Union[str, Sequence[str]] = None, + collections: Union[str, Sequence[str]] = None, + temporalBaselineDays: Union[str, Sequence[str]] = None, + operaBurstID: Union[str, Sequence[str]] = None, + dataset: Union[str, Sequence[str]] = None, + shortName: Union[str, Sequence[str]] = None, + cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None, + maxResults: int = None, + opts: ASFSearchOptions = None, ) -> int: # Create a kwargs dict, that's all of the 'not None' items, and merge it with opts: kwargs = locals() - opts = (ASFSearchOptions() if kwargs["opts"] is None else copy(opts)) + opts = ASFSearchOptions() if kwargs["opts"] is None else copy(opts) del kwargs["opts"] kwargs = dict((k, v) for k, v in kwargs.items() if v is not None) @@ -62,14 +72,18 @@ def search_count( preprocess_opts(opts) - url = '/'.join(s.strip('/') for s in [f'https://{opts.host}', f'{INTERNAL.CMR_GRANULE_PATH}']) + url = "/".join( + s.strip("/") for s in [f"https://{opts.host}", f"{INTERNAL.CMR_GRANULE_PATH}"] + ) count = 0 for query in build_subqueries(opts): translated_opts = translate_opts(query) - idx = translated_opts.index(('page_size', INTERNAL.CMR_PAGE_SIZE)) - translated_opts[idx] = ('page_size', 0) - - response = get_page(session=opts.session, url=url, translated_opts=translated_opts) - count += response.json()['hits'] + idx = translated_opts.index(("page_size", INTERNAL.CMR_PAGE_SIZE)) + translated_opts[idx] = ("page_size", 0) + + response = get_page( + session=opts.session, url=url, translated_opts=translated_opts + ) + count += response.json()["hits"] return count diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index 10b64892..feb13ebd 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -2,11 +2,17 @@ from copy import copy from requests.exceptions import HTTPError from requests import ReadTimeout, Response -from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential, wait_fixed +from tenacity import ( + retry, + retry_if_exception_type, + stop_after_attempt, + wait_exponential, + wait_fixed, +) import datetime import dateparser -from asf_search import ASF_LOGGER, __version__ +from asf_search import ASF_LOGGER from asf_search.ASFSearchResults import ASFSearchResults from asf_search.ASFSearchOptions import ASFSearchOptions @@ -15,57 +21,176 @@ from asf_search.ASFSession import ASFSession from asf_search.ASFProduct import ASFProduct -from asf_search.exceptions import ASFSearch4xxError, ASFSearch5xxError, ASFSearchError, CMRIncompleteError +from asf_search.exceptions import ( + ASFSearch4xxError, + ASFSearch5xxError, + ASFSearchError, + CMRIncompleteError, +) from asf_search.constants import INTERNAL from asf_search.WKT.validate_wkt import validate_wkt from asf_search.search.error_reporting import report_search_error import asf_search.Products as ASFProductType -from shapely.geometry.base import BaseGeometry + def search_generator( - absoluteOrbit: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - asfFrame: Union[int, Tuple[int, int], range, 
Sequence[Union[int, Tuple[int, int], range]]] = None, - beamMode: Union[str, Sequence[str]] = None, - beamSwath: Union[str, Sequence[str]] = None, - campaign: Union[str, Sequence[str]] = None, - maxDoppler: float = None, - minDoppler: float = None, - end: Union[datetime.datetime, str] = None, - maxFaradayRotation: float = None, - minFaradayRotation: float = None, - flightDirection: str = None, - flightLine: str = None, - frame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - granule_list: Union[str, Sequence[str]] = None, - groupID: Union[str, Sequence[str]] = None, - insarStackId: str = None, - instrument: Union[str, Sequence[str]] = None, - intersectsWith: str = None, - lookDirection: Union[str, Sequence[str]] = None, - offNadirAngle: Union[float, Tuple[float, float], Sequence[Union[float, Tuple[float, float]]]] = None, - platform: Union[str, Sequence[str]] = None, - polarization: Union[str, Sequence[str]] = None, - processingDate: Union[datetime.datetime, str] = None, - processingLevel: Union[str, Sequence[str]] = None, - product_list: Union[str, Sequence[str]] = None, - relativeOrbit: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, - season: Tuple[int, int] = None, - start: Union[datetime.datetime, str] = None, - absoluteBurstID: Union[int, Sequence[int]] = None, - relativeBurstID: Union[int, Sequence[int]] = None, - fullBurstID: Union[str, Sequence[str]] = None, - collections: Union[str, Sequence[str]] = None, - temporalBaselineDays: Union[str, Sequence[str]] = None, - operaBurstID: Union[str, Sequence[str]] = None, - dataset: Union[str, Sequence[str]] = None, - shortName: Union[str, Sequence[str]] = None, - cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None, - maxResults: int = None, - opts: ASFSearchOptions = None, - ) -> Generator[ASFSearchResults, None, None]: + absoluteOrbit: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + asfFrame: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + beamMode: Union[str, Sequence[str]] = None, + beamSwath: Union[str, Sequence[str]] = None, + campaign: Union[str, Sequence[str]] = None, + maxDoppler: float = None, + minDoppler: float = None, + end: Union[datetime.datetime, str] = None, + maxFaradayRotation: float = None, + minFaradayRotation: float = None, + flightDirection: str = None, + flightLine: str = None, + frame: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + granule_list: Union[str, Sequence[str]] = None, + groupID: Union[str, Sequence[str]] = None, + insarStackId: str = None, + instrument: Union[str, Sequence[str]] = None, + intersectsWith: str = None, + lookDirection: Union[str, Sequence[str]] = None, + offNadirAngle: Union[ + float, Tuple[float, float], Sequence[Union[float, Tuple[float, float]]] + ] = None, + platform: Union[str, Sequence[str]] = None, + polarization: Union[str, Sequence[str]] = None, + processingDate: Union[datetime.datetime, str] = None, + processingLevel: Union[str, Sequence[str]] = None, + product_list: Union[str, Sequence[str]] = None, + relativeOrbit: Union[ + int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] + ] = None, + season: Tuple[int, int] = None, + start: Union[datetime.datetime, str] = None, + absoluteBurstID: Union[int, Sequence[int]] = None, + relativeBurstID: Union[int, Sequence[int]] = None, + fullBurstID: Union[str, Sequence[str]] = 
None, + collections: Union[str, Sequence[str]] = None, + temporalBaselineDays: Union[str, Sequence[str]] = None, + operaBurstID: Union[str, Sequence[str]] = None, + dataset: Union[str, Sequence[str]] = None, + shortName: Union[str, Sequence[str]] = None, + cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None, + maxResults: int = None, + opts: ASFSearchOptions = None, +) -> Generator[ASFSearchResults, None, None]: + """ + Performs a generic search against the Central Metadata Repository (CMR), + yielding results page by page (250 products at a time by default) as they're returned from CMR. + + Accepts a number of search parameters, and/or an ASFSearchOptions object. + If an ASFSearchOptions object is provided as well as other specific parameters, + the two sets of options will be merged, preferring the specific keyword arguments. + + Parameters + ---------- + absoluteOrbit: + For ALOS, ERS-1, ERS-2, JERS-1, and RADARSAT-1, Sentinel-1A, Sentinel-1B + this value corresponds to the orbit count within the orbit cycle. + For UAVSAR it is the Flight ID. + asfFrame: + This is primarily an ASF / JAXA frame reference. However, + some platforms use other conventions. See ‘frame’ for ESA-centric frame searches. + beamMode: + The beam mode used to acquire the data. + beamSwath: + Encompasses a look angle and beam mode. + campaign: + For UAVSAR and AIRSAR data collections only. Search by general location, + site description, or data grouping as supplied by flight agency or project. + maxDoppler: + Doppler provides an indication of how much the look direction deviates + from the ideal perpendicular flight direction acquisition. + minDoppler: + Doppler provides an indication of how much the look direction deviates + from the ideal perpendicular flight direction acquisition. + end: + End date of data acquisition. Supports timestamps + as well as natural language such as "3 weeks ago" + maxFaradayRotation: + Rotation of the polarization plane of + the radar signal impacts imagery, as HH and HV signals become mixed. + minFaradayRotation: + Rotation of the polarization plane of + the radar signal impacts imagery, as HH and HV signals become mixed. + flightDirection: + Satellite orbit direction during data acquisition + flightLine: + Specify a flightline for UAVSAR or AIRSAR. + frame: + ESA-referenced frames are offered to give users a universal framing convention. + Each ESA frame has a corresponding ASF frame assigned. See also: asfframe + granule_list: + List of specific granules. + Search results may include several products per granule name. + groupID: + Identifier used to find products considered to + be of the same scene but having different granule names + insarStackId: + Identifier used to find products of the same InSAR stack + instrument: + The instrument used to acquire the data. See also: platform + intersectsWith: + Search by polygon, linestring, + or point defined in 2D Well-Known Text (WKT) + lookDirection: + Left or right look direction during data acquisition + offNadirAngle: + Off-nadir angles for ALOS PALSAR + platform: + Remote sensing platform that acquired the data. + Platforms that work together, such as Sentinel-1A/1B and ERS-1/2 + have multi-platform aliases available. See also: instrument + polarization: + A property of SAR electromagnetic waves + that can be used to extract meaningful information about surface properties of the earth. + processingDate: + Used to find data that has been processed at ASF since a given + time and date. 
Supports timestamps as well as natural language such as "3 weeks ago" + processingLevel: + Level to which the data has been processed + product_list: + List of specific products. + Guaranteed to be at most one product per product name. + relativeOrbit: + Path or track of satellite during data acquisition. + For UAVSAR it is the Line ID. + season: + Start and end day of year for desired seasonal range. + This option is used in conjunction with start/end to specify a seasonal range + within an overall date range. + start: + Start date of data acquisition. + Supports timestamps as well as natural language such as "3 weeks ago" + collections: + List of collections (concept-ids) to limit search to + temporalBaselineDays: + List of temporal baselines, + used for Sentinel-1 Interferogram (BETA) + maxResults: + The maximum number of results to be returned by the search + opts: + An ASFSearchOptions object describing the search parameters to be used. + Search parameters specified outside this object will override in event of a conflict. + + Yields + ------- + `asf_search.ASFSearchResults` (list of search results of subclass ASFProduct, page by page) + """ + # Create a kwargs dict, that's all of the 'not None' items, and merge it with opts: kwargs = locals() - opts = (ASFSearchOptions() if kwargs["opts"] is None else copy(opts)) + opts = ASFSearchOptions() if kwargs["opts"] is None else copy(opts) del kwargs["opts"] kwargs = dict((k, v) for k, v in kwargs.items() if v is not None) @@ -76,102 +201,158 @@ def search_generator( maxResults = opts.pop("maxResults", None) - if maxResults is not None and \ - (getattr(opts, 'granule_list', False) or getattr(opts, 'product_list', False)): - raise ValueError("Cannot use maxResults along with product_list/granule_list.") - - ASF_LOGGER.debug(f'SEARCH: preprocessing opts: {opts}') + if maxResults is not None and ( + getattr(opts, "granule_list", False) or getattr(opts, "product_list", False) + ): + raise ValueError("Cannot use maxResults along with product_list/granule_list.") + + ASF_LOGGER.debug(f"SEARCH: preprocessing opts: {opts}") preprocess_opts(opts) - ASF_LOGGER.debug(f'SEARCH: preprocessed opts: {opts}') - - ASF_LOGGER.info(f'SEARCH: Using search opts {opts}') + ASF_LOGGER.debug(f"SEARCH: preprocessed opts: {opts}") - url = '/'.join(s.strip('/') for s in [f'https://{opts.host}', f'{INTERNAL.CMR_GRANULE_PATH}']) + ASF_LOGGER.info(f"SEARCH: Using search opts {opts}") + + url = "/".join( + s.strip("/") for s in [f"https://{opts.host}", f"{INTERNAL.CMR_GRANULE_PATH}"] + ) total = 0 queries = build_subqueries(opts) ASF_LOGGER.info(f'SEARCH: Using cmr endpoint: "{url}"') - ASF_LOGGER.debug(f'SEARCH: Built {len(queries)} subqueries') - + ASF_LOGGER.debug(f"SEARCH: Built {len(queries)} subqueries") + for subquery_idx, query in enumerate(queries): - ASF_LOGGER.info(f'SUBQUERY {subquery_idx + 1}: Beginning subquery with opts: {query}') + ASF_LOGGER.info( + f"SUBQUERY {subquery_idx + 1}: Beginning subquery with opts: {query}" + ) - ASF_LOGGER.debug(f'TRANSLATION: Translating subquery:\n{query}') + ASF_LOGGER.debug(f"TRANSLATION: Translating subquery:\n{query}") translated_opts = translate_opts(query) - ASF_LOGGER.debug(f'TRANSLATION: Subquery translated to cmr keywords:\n{translated_opts}') + ASF_LOGGER.debug( + f"TRANSLATION: Subquery translated to cmr keywords:\n{translated_opts}" + ) cmr_search_after_header = "" subquery_count = 0 page_number = 1 - while(cmr_search_after_header is not None): + while cmr_search_after_header is not None: try: - 
ASF_LOGGER.debug(f'SUBQUERY {subquery_idx + 1}: Fetching page {page_number}')
-                items, subquery_max_results, cmr_search_after_header = query_cmr(opts.session, url, translated_opts, subquery_count)
+                ASF_LOGGER.debug(
+                    f"SUBQUERY {subquery_idx + 1}: Fetching page {page_number}"
+                )
+                items, subquery_max_results, cmr_search_after_header = query_cmr(
+                    opts.session,
+                    url,
+                    translated_opts,
+                    subquery_count
+                )
             except (ASFSearchError, CMRIncompleteError) as e:
                 message = str(e)
                 ASF_LOGGER.error(message)
                 report_search_error(query, message)
-                opts.session.headers.pop('CMR-Search-After', None)
+                opts.session.headers.pop("CMR-Search-After", None)
                 return

-            ASF_LOGGER.debug(f'SUBQUERY {subquery_idx + 1}: Page {page_number} fetched, returned {len(items)} items.')
-            opts.session.headers.update({'CMR-Search-After': cmr_search_after_header})
-            last_page = process_page(items, maxResults, subquery_max_results, total, subquery_count, opts)
+            ASF_LOGGER.debug(
+                f'SUBQUERY {subquery_idx + 1}: Page {page_number} fetched, '
+                f'returned {len(items)} items.'
+            )
+            opts.session.headers.update({"CMR-Search-After": cmr_search_after_header})
+            last_page = process_page(
+                items, maxResults, subquery_max_results, total, subquery_count, opts
+            )
             subquery_count += len(last_page)
             total += len(last_page)
-            last_page.searchComplete = subquery_count == subquery_max_results or total == maxResults
+            last_page.searchComplete = (
+                subquery_count == subquery_max_results or total == maxResults
+            )
             yield last_page

             if last_page.searchComplete:
-                if total == maxResults: # the user has as many results as they wanted
-                    ASF_LOGGER.info(f'SEARCH COMPLETE: MaxResults ({maxResults}) reached')
-                    opts.session.headers.pop('CMR-Search-After', None)
+                if total == maxResults:  # the user has as many results as they wanted
+                    ASF_LOGGER.info(
+                        f"SEARCH COMPLETE: MaxResults ({maxResults}) reached"
+                    )
+                    opts.session.headers.pop("CMR-Search-After", None)
                     return
-                else: # or we've gotten all possible results for this subquery
-                    ASF_LOGGER.info(f'SUBQUERY {subquery_idx + 1} COMPLETE: results exhausted for subquery')
+                else:  # or we've gotten all possible results for this subquery
+                    ASF_LOGGER.info(
+                        f"SUBQUERY {subquery_idx + 1} COMPLETE: results exhausted for subquery"
+                    )
                     cmr_search_after_header = None
-
-            page_number += 1

-        opts.session.headers.pop('CMR-Search-After', None)
-
-    ASF_LOGGER.info(f'SEARCH COMPLETE: results exhausted for search opts {opts}')
+            page_number += 1

-@retry(reraise=True,
-       retry=retry_if_exception_type(CMRIncompleteError),
-       wait=wait_fixed(2),
-       stop=stop_after_attempt(3),
+        opts.session.headers.pop("CMR-Search-After", None)
+
+    ASF_LOGGER.info(f"SEARCH COMPLETE: results exhausted for search opts {opts}")
+
+
+@retry(
+    reraise=True,
+    retry=retry_if_exception_type(CMRIncompleteError),
+    wait=wait_fixed(2),
+    stop=stop_after_attempt(3),
+)
+def query_cmr(
+    session: ASFSession,
+    url: str,
+    translated_opts: Dict,
+    sub_query_count: int,
+):
+    response = get_page(
+        session=session, url=url, translated_opts=translated_opts
     )
-def query_cmr(session: ASFSession, url: str, translated_opts: Dict, sub_query_count: int):
-    response = get_page(session=session, url=url, translated_opts=translated_opts)
-    items = [as_ASFProduct(f, session=session) for f in response.json()['items']]
-    hits: int = response.json()['hits'] # total count of products given search opts
+    items = [as_ASFProduct(f, session=session) for f in response.json()["items"]]
+    hits: int = response.json()["hits"]  # total count of products given search opts

    # 
sometimes CMR returns results with the wrong page size
     if len(items) != INTERNAL.CMR_PAGE_SIZE and len(items) + sub_query_count < hits:
-        raise CMRIncompleteError(f"CMR returned page of incomplete results. Expected {min(INTERNAL.CMR_PAGE_SIZE, hits - sub_query_count)} results, got {len(items)}")
-
-    return items, hits, response.headers.get('CMR-Search-After', None)
-
-
-def process_page(items: List[ASFProduct], max_results: int, subquery_max_results: int, total: int, subquery_count: int, opts: ASFSearchOptions):
+        raise CMRIncompleteError(
+            'CMR returned page of incomplete results. '
+            f'Expected {min(INTERNAL.CMR_PAGE_SIZE, hits - sub_query_count)} results, '
+            f'got {len(items)}'
+        )
+
+    return items, hits, response.headers.get("CMR-Search-After", None)
+
+
+def process_page(
+    items: List[ASFProduct],
+    max_results: int,
+    subquery_max_results: int,
+    total: int,
+    subquery_count: int,
+    opts: ASFSearchOptions,
+):
     if max_results is None:
-        last_page = ASFSearchResults(items[:min(subquery_max_results - subquery_count, len(items))], opts=opts)
+        last_page = ASFSearchResults(
+            items[: min(subquery_max_results - subquery_count, len(items))], opts=opts
+        )
     else:
-        last_page = ASFSearchResults(items[:min(max_results - total, len(items))], opts=opts)
+        last_page = ASFSearchResults(
+            items[: min(max_results - total, len(items))], opts=opts
+        )

     return last_page


-@retry(reraise=True,
-       retry=retry_if_exception_type(ASFSearch5xxError),
-       wait=wait_exponential(multiplier=1, min=3, max=10), # Wait 2^x * 1 starting with 3 seconds, max 10 seconds between retries
-       stop=stop_after_attempt(3),
-       )
-def get_page(session: ASFSession, url: str, translated_opts: List) -> Response:
+@retry(
+    reraise=True,
+    retry=retry_if_exception_type(ASFSearch5xxError),
+    wait=wait_exponential(
+        multiplier=1, min=3, max=10
+    ),  # Wait 2^x * 1 starting with 3 seconds, max 10 seconds between retries
+    stop=stop_after_attempt(3),
+)
+def get_page(
+    session: ASFSession, url: str, translated_opts: List
+) -> Response:
     try:
-        response = session.post(url=url, data=translated_opts, timeout=INTERNAL.CMR_TIMEOUT)
+        response = session.post(
+            url=url, data=translated_opts, timeout=INTERNAL.CMR_TIMEOUT
+        )
         response.raise_for_status()
     except HTTPError as exc:
         error_message = f'HTTP {response.status_code}: {response.json()["errors"]}'
@@ -180,7 +361,10 @@ def get_page(session: ASFSession, url: str, translated_opts: List) -> Response:
         if 500 <= response.status_code <= 599:
             raise ASFSearch5xxError(error_message) from exc
     except ReadTimeout as exc:
-        raise ASFSearchError(f'Connection Error (Timeout): CMR took too long to respond. Set asf constant "CMR_TIMEOUT" to increase. ({url=}, timeout={INTERNAL.CMR_TIMEOUT})') from exc
+        raise ASFSearchError(
+            'Connection Error (Timeout): CMR took too long to respond. '
+            f'Set asf constant "CMR_TIMEOUT" to increase. 
({url=}, timeout={INTERNAL.CMR_TIMEOUT})'
+        ) from exc

     return response


@@ -201,24 +385,33 @@ def wrap_wkt(opts: ASFSearchOptions):
         wrapped, _, repairs = validate_wkt(opts.intersectsWith)
         opts.intersectsWith = wrapped.wkt
         if len(repairs):
-            ASF_LOGGER.warning(f"WKT REPAIR/VALIDATION: The following repairs were performed on the provided AOI:\n{[str(repair) for repair in repairs]}")
+            ASF_LOGGER.warning(
+                'WKT REPAIR/VALIDATION: The following repairs were performed '
+                f'on the provided AOI:\n{[str(repair) for repair in repairs]}'
+            )


 def set_default_dates(opts: ASFSearchOptions):
     if opts.start is not None and isinstance(opts.start, str):
-        opts.start = dateparser.parse(opts.start, settings={'RETURN_AS_TIMEZONE_AWARE': True})
+        opts.start = dateparser.parse(
+            opts.start, settings={"RETURN_AS_TIMEZONE_AWARE": True}
+        )
     if opts.end is not None and isinstance(opts.end, str):
-        opts.end = dateparser.parse(opts.end, settings={'RETURN_AS_TIMEZONE_AWARE': True})
+        opts.end = dateparser.parse(
+            opts.end, settings={"RETURN_AS_TIMEZONE_AWARE": True}
+        )

     # If both are used, make sure they're in the right order:
     if opts.start is not None and opts.end is not None:
         if opts.start > opts.end:
-            ASF_LOGGER.warning(f"Start date ({opts.start}) is after end date ({opts.end}). Switching the two.")
+            ASF_LOGGER.warning(
+                f"Start date ({opts.start}) is after end date ({opts.end}). Switching the two."
+            )
             opts.start, opts.end = opts.end, opts.start

     # Can't do this sooner, since you need to compare start vs end:
     if opts.start is not None:
-        opts.start = opts.start.strftime('%Y-%m-%dT%H:%M:%SZ')
+        opts.start = opts.start.strftime("%Y-%m-%dT%H:%M:%SZ")
     if opts.end is not None:
-        opts.end = opts.end.strftime('%Y-%m-%dT%H:%M:%SZ')
+        opts.end = opts.end.strftime("%Y-%m-%dT%H:%M:%SZ")


 def set_platform_alias(opts: ASFSearchOptions):
@@ -226,26 +419,26 @@ def set_platform_alias(opts: ASFSearchOptions):
     if opts.platform is not None:
         plat_aliases = {
             # Groups:
-            'S1': ['SENTINEL-1A', 'SENTINEL-1B'],
-            'SENTINEL-1': ['SENTINEL-1A', 'SENTINEL-1B'],
-            'SENTINEL': ['SENTINEL-1A', 'SENTINEL-1B'],
-            'ERS': ['ERS-1', 'ERS-2'],
-            'SIR-C': ['STS-59', 'STS-68'],
+            "S1": ["SENTINEL-1A", "SENTINEL-1B"],
+            "SENTINEL-1": ["SENTINEL-1A", "SENTINEL-1B"],
+            "SENTINEL": ["SENTINEL-1A", "SENTINEL-1B"],
+            "ERS": ["ERS-1", "ERS-2"],
+            "SIR-C": ["STS-59", "STS-68"],
             # Singles / Aliases:
-            'R1': ['RADARSAT-1'],
-            'E1': ['ERS-1'],
-            'E2': ['ERS-2'],
-            'J1': ['JERS-1'],
-            'A3': ['ALOS'],
-            'AS': ['DC-8'],
-            'AIRSAR': ['DC-8'],
-            'SS': ['SEASAT 1'],
-            'SEASAT': ['SEASAT 1'],
-            'SA': ['SENTINEL-1A'],
-            'SB': ['SENTINEL-1B'],
-            'SP': ['SMAP'],
-            'UA': ['G-III'],
-            'UAVSAR': ['G-III'],
+            "R1": ["RADARSAT-1"],
+            "E1": ["ERS-1"],
+            "E2": ["ERS-2"],
+            "J1": ["JERS-1"],
+            "A3": ["ALOS"],
+            "AS": ["DC-8"],
+            "AIRSAR": ["DC-8"],
+            "SS": ["SEASAT 1"],
+            "SEASAT": ["SEASAT 1"],
+            "SA": ["SENTINEL-1A"],
+            "SB": ["SENTINEL-1B"],
+            "SP": ["SMAP"],
+            "UA": ["G-III"],
+            "UAVSAR": ["G-III"],
         }
         platform_list = []
         for plat in opts.platform:
@@ -257,8 +450,9 @@ def set_platform_alias(opts: ASFSearchOptions):

         opts.platform = list(set(platform_list))

+
 def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct:
-    """ Returns the granule umm as the corresponding ASFProduct subclass,
+    """Returns the granule umm as the corresponding ASFProduct subclass,
     or ASFProduct if no equivalent is found

     :param item: the granule umm json
@@ -282,77 +476,75 @@ def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct:
             subclass = dataset_to_product_types.get(dataset)
            if 
@@ -282,77 +476,75 @@ def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct:
             subclass = dataset_to_product_types.get(dataset)
             if subclass is not None:
                 return subclass(item, session=session)
-            break # dataset exists, but is not in dataset_to_product_types yet
+            break  # dataset exists, but is not in dataset_to_product_types yet

     # If the platform exists, try to match it
     platform = _get_platform(item=item)
     if ASFProductType.ARIAS1GUNWProduct._is_subclass(item=item):
-        return dataset_to_product_types.get('ARIA S1 GUNW')(item, session=session)
+        return dataset_to_product_types.get("ARIA S1 GUNW")(item, session=session)
     elif (subclass := dataset_to_product_types.get(platform)) is not None:
         return subclass(item, session=session)
-
+
     output = ASFProduct(item, session=session)
-
-    granule_concept_id = output.meta.get('concept-id', 'Missing Granule Concept ID')
-    fileID = output.properties.get('fileID', output.properties.get('sceneName', 'fileID and sceneName Missing'))
-    ASF_LOGGER.warning(f'Failed to find corresponding ASFProduct subclass for \
-        Product: "{fileID}", Granule Concept ID: "{granule_concept_id}", default to "ASFProduct"')
+    granule_concept_id = output.meta.get("concept-id", "Missing Granule Concept ID")
+    fileID = output.properties.get(
+        "fileID", output.properties.get("sceneName", "fileID and sceneName Missing")
+    )
+
+    ASF_LOGGER.warning(
+        f'Failed to find corresponding ASFProduct subclass for \
+        Product: "{fileID}", Granule Concept ID: "{granule_concept_id}", \
+        default to "ASFProduct"')
     return output

+
 def _get_product_type_key(item: Dict) -> str:
     """Match the umm response to the right ASFProduct subclass by returning one of the following:
-        1. collection shortName (Ideal case)
-        2. platform_shortName (Fallback)
-            - special case: Aria S1 GUNW
+    1. collection shortName (Ideal case)
+    2. platform_shortName (Fallback)
+        - special case: Aria S1 GUNW
     """
-    collection_shortName = ASFProduct.umm_get(item['umm'], 'CollectionReference', 'ShortName')
+    collection_shortName = ASFProduct.umm_get(
+        item["umm"], "CollectionReference", "ShortName"
+    )

     if collection_shortName is None:
         if ASFProductType.ARIAS1GUNWProduct._is_subclass(item=item):
-            return 'ARIA S1 GUNW'
+            return "ARIA S1 GUNW"

         platform = _get_platform(item=item)
         return platform

     return collection_shortName

+
 def _get_platform(item: Dict):
-    return ASFProduct.umm_get(item['umm'], 'Platforms', 0, 'ShortName')
+    return ASFProduct.umm_get(item["umm"], "Platforms", 0, "ShortName")

+
 # Maps datasets from DATASET.py and collection/platform shortnames to ASFProduct subclasses
 dataset_to_product_types = {
-    'SENTINEL-1': ASFProductType.S1Product,
-    'OPERA-S1': ASFProductType.OPERAS1Product,
-    'OPERA-S1-CALVAL': ASFProductType.OPERAS1Product,
-    'SLC-BURST': ASFProductType.S1BurstProduct,
-
-    'ALOS': ASFProductType.ALOSProduct,
-
-    'SIR-C': ASFProductType.SIRCProduct,
-    'STS-59': ASFProductType.SIRCProduct,
-    'STS-68': ASFProductType.SIRCProduct,
-
-    'ARIA S1 GUNW': ASFProductType.ARIAS1GUNWProduct,
-
-    'SMAP': ASFProductType.SMAPProduct,
-
-    'UAVSAR': ASFProductType.UAVSARProduct,
-    'G-III': ASFProductType.UAVSARProduct,
-
-    'RADARSAT-1': ASFProductType.RADARSATProduct,
-
-    'ERS': ASFProductType.ERSProduct,
-    'ERS-1': ASFProductType.ERSProduct,
-    'ERS-2': ASFProductType.ERSProduct,
-
-    'JERS-1': ASFProductType.JERSProduct,
-
-    'AIRSAR': ASFProductType.AIRSARProduct,
-    'DC-8': ASFProductType.AIRSARProduct,
-
-    'SEASAT': ASFProductType.SEASATProduct,
-    'SEASAT 1': ASFProductType.SEASATProduct,
-
-    'NISAR': ASFProductType.NISARProduct
+    "SENTINEL-1": ASFProductType.S1Product,
+    "OPERA-S1": ASFProductType.OPERAS1Product,
+    "OPERA-S1-CALVAL": ASFProductType.OPERAS1Product,
+    "SLC-BURST": ASFProductType.S1BurstProduct,
+    "ALOS": ASFProductType.ALOSProduct,
+    "SIR-C": ASFProductType.SIRCProduct,
+    "STS-59": ASFProductType.SIRCProduct,
+    "STS-68": ASFProductType.SIRCProduct,
+    "ARIA S1 GUNW": ASFProductType.ARIAS1GUNWProduct,
+    "SMAP": ASFProductType.SMAPProduct,
+    "UAVSAR": ASFProductType.UAVSARProduct,
+    "G-III": ASFProductType.UAVSARProduct,
+    "RADARSAT-1": ASFProductType.RADARSATProduct,
+    "ERS": ASFProductType.ERSProduct,
+    "ERS-1": ASFProductType.ERSProduct,
+    "ERS-2": ASFProductType.ERSProduct,
+    "JERS-1": ASFProductType.JERSProduct,
+    "AIRSAR": ASFProductType.AIRSARProduct,
+    "DC-8": ASFProductType.AIRSARProduct,
+    "SEASAT": ASFProductType.SEASATProduct,
+    "SEASAT 1": ASFProductType.SEASATProduct,
+    "NISAR": ASFProductType.NISARProduct,
 }

From 411ecf514f8a2d4ceb349fb352724bc9d7eeca0d Mon Sep 17 00:00:00 2001
From: kim
Date: Mon, 15 Jul 2024 11:40:24 -0800
Subject: [PATCH 28/40] adds push hook to lint workflow

---
 .github/workflows/lint.yml | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 22a36ffa..48b80ac4 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -1,7 +1,9 @@
 name: Lint

-on:
-  pull_request:
+on:
+  pull_request:
+    types: [opened, edited, reopened]
+  push:

 jobs:
   lint:

From 8fe498940fe03440ef58f8a2da6d1f8c64979532 Mon Sep 17 00:00:00 2001
From: kim
Date: Mon, 15 Jul 2024 11:44:41 -0800
Subject: [PATCH 29/40] modifies test workflow triggers

---
 .github/workflows/run-pytest.yml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/run-pytest.yml b/.github/workflows/run-pytest.yml
index 855336a6..9be6fe7f 100644
--- a/.github/workflows/run-pytest.yml
+++ b/.github/workflows/run-pytest.yml
@@ -1,6 +1,9 @@
 name: tests

-on: [pull_request, push]
+on:
+  pull_request:
+    types: [opened, edited, reopened]
+  push:

 jobs:
   run-tests:

From 02ec00b19681cd09d56d99bd37ca0483c63ba753 Mon Sep 17 00:00:00 2001
From: kim
Date: Mon, 15 Jul 2024 13:19:19 -0800
Subject: [PATCH 30/40] fixes metalink output

---
 asf_search/export/metalink.py | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/asf_search/export/metalink.py b/asf_search/export/metalink.py
index 7171f624..ca513de9 100644
--- a/asf_search/export/metalink.py
+++ b/asf_search/export/metalink.py
@@ -18,13 +18,14 @@ class MetalinkStreamArray(list):
     def __init__(self, results):
         self.pages = results
         self.len = 1
-        self.header = """<?xml version="1.0"?>
-        <metalink xmlns="http://www.metalinker.org/" version="3.0">
-        <publisher><name>Alaska Satellite Facility</name><url>http://www.asf.alaska.edu/</url></publisher><files> # noqa F401
-        """
-
-        self.footer = """
-        </files></metalink>\n"""
+        self.header = (
+            '<?xml version="1.0"?>'
+            '<metalink xmlns="http://www.metalinker.org/" version="3.0">\n'
+            '<publisher><name>Alaska Satellite Facility</name><url>http://www.asf.alaska.edu/</url></publisher>\n'  # noqa F401
+            '<files>'
+        )
+
+        self.footer = '\n</files>\n</metalink>'

     def get_additional_fields(self, product):
         return {}
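With the header and footer rebuilt from explicit single-quoted strings, the exporter now emits one well-formed metalink document rather than the indented triple-quoted fragments it produced before. A sketch of the skeleton those strings assemble, assuming the reconstructed element names above (per-product file entries elided):

```python
# Concatenating the new header and footer yields a minimal metalink 3.0 shell;
# MetalinkStreamArray streams the per-product <file> entries in between.
header = (
    '<?xml version="1.0"?>'
    '<metalink xmlns="http://www.metalinker.org/" version="3.0">\n'
    '<publisher><name>Alaska Satellite Facility</name><url>http://www.asf.alaska.edu/</url></publisher>\n'
    '<files>'
)
footer = '\n</files>\n</metalink>'
print(header + footer)
```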
From abcbc0d39530707ca336c66f1f14c7ef2b28f943 Mon Sep 17 00:00:00 2001
From: kim
Date: Fri, 2 Aug 2024 10:31:52 -0800
Subject: [PATCH 31/40] Fixes customized CMR_TIMEOUT, sets timeout to 60 on
 intersection test

---
 asf_search/search/search_generator.py           |  6 +++---
 tests/ASFSearchResults/test_ASFSearchResults.py | 15 ++++++++++-----
 2 files changed, 13 insertions(+), 8 deletions(-)

diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py
index c7568118..e4bc4b12 100644
--- a/asf_search/search/search_generator.py
+++ b/asf_search/search/search_generator.py
@@ -185,10 +185,10 @@ def process_page(items: List[ASFProduct], max_results: int, subquery_max_results
        stop=stop_after_attempt(3),
     )
 def get_page(session: ASFSession, url: str, translated_opts: List) -> Response:
-
+    from asf_search.constants.INTERNAL import CMR_TIMEOUT
     perf = time.time()
     try:
-        response = session.post(url=url, data=translated_opts, timeout=INTERNAL.CMR_TIMEOUT)
+        response = session.post(url=url, data=translated_opts, timeout=CMR_TIMEOUT)
         response.raise_for_status()
     except HTTPError as exc:
         error_message = f'HTTP {response.status_code}: {response.json()["errors"]}'
@@ -197,7 +197,7 @@ def get_page(session: ASFSession, url: str, translated_opts: List) -> Response:
         if 500 <= response.status_code <= 599:
             raise ASFSearch5xxError(error_message) from exc
     except ReadTimeout as exc:
-        raise ASFSearchError(f'Connection Error (Timeout): CMR took too long to respond. Set asf constant "CMR_TIMEOUT" to increase. ({url=}, timeout={INTERNAL.CMR_TIMEOUT})') from exc
+        raise ASFSearchError(f'Connection Error (Timeout): CMR took too long to respond. Set asf constant "asf_search.constants.INTERNAL.CMR_TIMEOUT" to increase. ({url=}, timeout={CMR_TIMEOUT})') from exc
     ASF_LOGGER.warning(f"Query Time Elapsed {time.time() - perf}")
     return response

diff --git a/tests/ASFSearchResults/test_ASFSearchResults.py b/tests/ASFSearchResults/test_ASFSearchResults.py
index 7f80a124..397bc051 100644
--- a/tests/ASFSearchResults/test_ASFSearchResults.py
+++ b/tests/ASFSearchResults/test_ASFSearchResults.py
@@ -18,6 +18,8 @@ from asf_search.constants import PLATFORM
 import re

+from asf_search.exceptions import ASFSearchError
+
 # when this replaces SearchAPI change values to cached
 API_URL = 'https://api.daac.asf.alaska.edu/services/search/param?'

@@ -198,18 +200,21 @@ def run_test_ASFSearchResults_intersection(wkt: str):
     # exclude SMAP products
     platforms = [
-        PLATFORM.ALOS,
         PLATFORM.SENTINEL1,
-        PLATFORM.SIRC,
         PLATFORM.UAVSAR
     ]

     def overlap_check(s1: BaseGeometry, s2: BaseGeometry):
         return s1.overlaps(s2) or s1.touches(s2) or s2.distance(s1) <= 0.005
-
+    asf.constants.INTERNAL.CMR_TIMEOUT = 60
     for platform in platforms:
-        results = asf.geo_search(intersectsWith=wkt, platform=platform, maxResults=250)
-
+        try:
+            results = asf.geo_search(intersectsWith=wkt, platform=platform, maxResults=250)
+        except ASFSearchError as exc:
+            asf.constants.INTERNAL.CMR_TIMEOUT = 30
+            raise BaseException(f'Failed to perform intersection test with wkt: {wkt}\nplatform: {platform}.\nOriginal exception: {exc}')
+
+    asf.constants.INTERNAL.CMR_TIMEOUT = 30
     for product in results:
         if shape(product.geometry).is_valid:
             product_geom_wrapped, product_geom_unwrapped, _ = asf.validate_wkt(shape(product.geometry))
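Because `get_page` now imports `CMR_TIMEOUT` at call time rather than at module import, an assignment made by user code is honored on the very next request. The intersection test above relies on exactly that; a minimal sketch of the same pattern, restoring the library's default of 30 seconds afterwards:

```python
import asf_search as asf

# Raise the CMR request timeout for a slow query, then restore the default.
asf.constants.INTERNAL.CMR_TIMEOUT = 60
try:
    results = asf.search(platform=asf.PLATFORM.SENTINEL1, maxResults=10)
finally:
    asf.constants.INTERNAL.CMR_TIMEOUT = 30
print(len(results))
```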
From fde4224c5e0666632db2fccf43ba1da885f19d55 Mon Sep 17 00:00:00 2001
From: kim
Date: Fri, 2 Aug 2024 11:53:33 -0800
Subject: [PATCH 32/40] changes _properties_paths back to _base_properties

---
 asf_search/ASFProduct.py                 | 17 +-------
 asf_search/CMR/subquery.py               | 22 ----------
 asf_search/Products/AIRSARProduct.py     |  2 +-
 asf_search/Products/ALOSProduct.py       |  2 +-
 asf_search/Products/ARIAS1GUNWProduct.py |  2 +-
 asf_search/Products/ERSProduct.py        |  2 +-
 asf_search/Products/JERSProduct.py       |  2 +-
 asf_search/Products/NISARProduct.py      |  2 +-
 asf_search/Products/OPERAS1Product.py    |  2 +-
 asf_search/Products/RADARSATProduct.py   |  2 +-
 asf_search/Products/S1BurstProduct.py    |  2 +-
 asf_search/Products/S1Product.py         |  2 +-
 asf_search/Products/SEASATProduct.py     |  2 +-
 asf_search/Products/SIRCProduct.py       |  2 +-
 asf_search/Products/SMAPProduct.py       |  2 +-
 asf_search/Products/UAVSARProduct.py     |  2 +-
 asf_search/search/baseline_search.py     |  2 +-
 examples/5-Download.ipynb                | 52 ++++++++++++++++++++++--
 18 files changed, 66 insertions(+), 55 deletions(-)

diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py
index 55f5290a..305352c5 100644
--- a/asf_search/ASFProduct.py
+++ b/asf_search/ASFProduct.py
@@ -41,7 +41,7 @@ class ASFProduct:
     def get_classname(cls):
         return cls.__name__

-    _properties_paths = {
+    _base_properties = {
         # min viable product
         'centerLat': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0], 'cast': try_parse_float},
         'centerLon': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0], 'cast': try_parse_float},
@@ -67,7 +67,7 @@ def get_classname(cls):
         'sensor': {'path': [ 'Platforms', 0, 'Instruments', 0, 'ShortName'], },
     }
     """
-    _properties_paths dictionary, mapping readable property names to paths and optional type casting
+    _base_properties dictionary, mapping readable property names to paths and optional type casting

     entries are organized as such:
         - `PROPERTY_NAME`: The name the property should be called in `ASFProduct.properties`
@@ -282,19 +282,6 @@ def translate_product(self, item: Dict) -> Dict:

         return {'geometry': geometry, 'properties': properties, 'type': 'Feature'}

-    # ASFProduct subclasses define extra/override param key + UMM pathing here
-    @staticmethod
-    def get_property_paths() -> Dict:
-        """
-        Returns _base_properties of class, subclasses such as `S1Product` (or user provided subclasses) can override this to
-        define which properties they want in their subclass's properties dict.
-
-        (See `S1Product.get_property_paths()` for example of combining _base_properties of multiple classes)
-
-        :returns dictionary, {`PROPERTY_NAME`: {'path': [umm, path, to, value], 'cast (optional)': Callable_to_cast_value}, ...}
-        """
-        return ASFProduct._base_properties
-
     def get_sort_keys(self) -> Tuple[str, str]:
         """
         Returns tuple of primary and secondary date values used for sorting final search results

diff --git a/asf_search/CMR/subquery.py b/asf_search/CMR/subquery.py
index d11b7c4c..db48c213 100644
--- a/asf_search/CMR/subquery.py
+++ b/asf_search/CMR/subquery.py
@@ -137,28 +137,6 @@ def format_query_params(params) -> List[List[dict]]:


 def translate_param(param_name, param_val) -> List[dict]:
-    # param_list = []
-
-    # cmr_input_map = field_map
-
-    # param_input_map = cmr_input_map[param_name]
-    # cmr_param = param_input_map['key']
-    # cmr_format_str = param_input_map['fmt']
-
-    # if not isinstance(param_val, list):
-    #     param_val = [param_val]
-
-    # for l in param_val:
-    #     format_val = l
-
-    #     if isinstance(l, list):
-    #         format_val = ','.join([f'{t}' for t in l])
-
-    #     param_list.append({
-    #         cmr_param: cmr_format_str.format(format_val)
-    #     })
-
-    # return param_list
     param_list = []

     if not isinstance(param_val, list):

diff --git a/asf_search/Products/AIRSARProduct.py b/asf_search/Products/AIRSARProduct.py
index aa1e0e8b..0f5e2902 100644
--- a/asf_search/Products/AIRSARProduct.py
+++ b/asf_search/Products/AIRSARProduct.py
@@ -7,7 +7,7 @@ class AIRSARProduct(ASFProduct):
     """
     ASF Dataset Overview Page: https://asf.alaska.edu/data-sets/sar-data-sets/airsar/
     """
-    _properties_paths = {
+    _base_properties = {
         **ASFProduct._properties_paths,
         'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int},
         'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py
index 035b3d1f..7052186f 100644
--- a/asf_search/Products/ALOSProduct.py
+++ b/asf_search/Products/ALOSProduct.py
@@ -10,7 +10,7 @@ class ALOSProduct(ASFStackableProduct):
     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/
     """

-    _properties_paths = {
+    _base_properties = {
         **ASFStackableProduct._properties_paths,
         'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int},
         'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float},

diff --git a/asf_search/Products/ARIAS1GUNWProduct.py b/asf_search/Products/ARIAS1GUNWProduct.py
index a65b6e24..f75ddb9e 100644
--- a/asf_search/Products/ARIAS1GUNWProduct.py
+++ b/asf_search/Products/ARIAS1GUNWProduct.py
@@ -12,7 +12,7 @@ class ARIAS1GUNWProduct(S1Product):
     ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/derived-data-sets/sentinel-1-interferograms/
     """

-    _properties_paths = {
+    _base_properties = {
         **S1Product._properties_paths,
         'perpendicularBaseline': {'path': ['AdditionalAttributes', ('Name', 'PERPENDICULAR_BASELINE'), 'Values', 0], 'cast': try_parse_float},
         'orbit': {'path': ['OrbitCalculatedSpatialDomains']},

diff --git a/asf_search/Products/ERSProduct.py b/asf_search/Products/ERSProduct.py
index 4a53d35c..73aa447e 100644
--- a/asf_search/Products/ERSProduct.py
+++ b/asf_search/Products/ERSProduct.py
@@ -11,7 +11,7 @@ class ERSProduct(ASFStackableProduct):
     ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/
     ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/
     """
-    _properties_paths = {
+    _base_properties = {
         **ASFStackableProduct._properties_paths,
         'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]},
         'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float},

diff --git a/asf_search/Products/JERSProduct.py b/asf_search/Products/JERSProduct.py
index f829a760..5ea10e19 100644
--- a/asf_search/Products/JERSProduct.py
+++ b/asf_search/Products/JERSProduct.py
@@ -7,7 +7,7 @@ class JERSProduct(ASFStackableProduct):
     """
     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/jers-1/
     """
-    _properties_paths = {
+    _base_properties = {
         **ASFStackableProduct._properties_paths,
         'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]},
         'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},

diff --git a/asf_search/Products/NISARProduct.py b/asf_search/Products/NISARProduct.py
index cd4956e1..30fc97b0 100644
--- a/asf_search/Products/NISARProduct.py
+++ b/asf_search/Products/NISARProduct.py
@@ -10,7 +10,7 @@ class NISARProduct(ASFStackableProduct):
     ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/
     """

-    _properties_paths = {
+    _base_properties = {
         **ASFStackableProduct._properties_paths,
         'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']}
     }

diff --git a/asf_search/Products/OPERAS1Product.py b/asf_search/Products/OPERAS1Product.py
index 0c3c9a6d..91b24a78 100644
--- a/asf_search/Products/OPERAS1Product.py
+++ b/asf_search/Products/OPERAS1Product.py
@@ -8,7 +8,7 @@ class OPERAS1Product(S1Product):
     """
     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/opera/
     """
-    _properties_paths = {
+    _base_properties = {
         **S1Product._properties_paths,
         'centerLat': {'path': []},  # Opera products lacks these fields
         'centerLon': {'path': []},
diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py
index 7c933fe6..817465c4 100644
--- a/asf_search/Products/RADARSATProduct.py
+++ b/asf_search/Products/RADARSATProduct.py
@@ -8,7 +8,7 @@ class RADARSATProduct(ASFStackableProduct):
     """
     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/radarsat-1/
     """
-    _properties_paths = {
+    _base_properties = {
         **ASFStackableProduct._properties_paths,
         'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float},
         'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},

diff --git a/asf_search/Products/S1BurstProduct.py b/asf_search/Products/S1BurstProduct.py
index 7d35f923..44462544 100644
--- a/asf_search/Products/S1BurstProduct.py
+++ b/asf_search/Products/S1BurstProduct.py
@@ -17,7 +17,7 @@ class S1BurstProduct(S1Product):

     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/
     """
-    _properties_paths = {
+    _base_properties = {
         **S1Product._properties_paths,
         'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]},
         'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int},

diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py
index c6f24ff5..16b24d7c 100644
--- a/asf_search/Products/S1Product.py
+++ b/asf_search/Products/S1Product.py
@@ -15,7 +15,7 @@ class S1Product(ASFStackableProduct):
     ASF Dataset Overview Page: https://asf.alaska.edu/datasets/daac/sentinel-1/
     """

-    _properties_paths = {
+    _base_properties = {
         **ASFStackableProduct._properties_paths,
         'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int},  #Sentinel and ALOS product alt for frameNumber (ESA_FRAME)
         'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},

diff --git a/asf_search/Products/SEASATProduct.py b/asf_search/Products/SEASATProduct.py
index 1158fcf1..90c760e8 100644
--- a/asf_search/Products/SEASATProduct.py
+++ b/asf_search/Products/SEASATProduct.py
@@ -7,7 +7,7 @@ class SEASATProduct(ASFProduct):
     """
     ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/seasat/
     """
-    _properties_paths = {
+    _base_properties = {
         **ASFProduct._properties_paths,
         'bytes': {'path': [ 'AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float},
         'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},

diff --git a/asf_search/Products/SIRCProduct.py b/asf_search/Products/SIRCProduct.py
index e2b05df3..bdd41821 100644
--- a/asf_search/Products/SIRCProduct.py
+++ b/asf_search/Products/SIRCProduct.py
@@ -5,7 +5,7 @@ class SIRCProduct(ASFProduct):
     """
     Dataset Documentation Page: https://eospso.nasa.gov/missions/spaceborne-imaging-radar-c
     """
-    _properties_paths = {
+    _base_properties = {
         **ASFProduct._properties_paths,
         'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
         'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
diff --git a/asf_search/Products/SMAPProduct.py b/asf_search/Products/SMAPProduct.py
index a2750032..1e4834dd 100644
--- a/asf_search/Products/SMAPProduct.py
+++ b/asf_search/Products/SMAPProduct.py
@@ -7,7 +7,7 @@ class SMAPProduct(ASFProduct):
     """
     ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/
     """
-    _properties_paths = {
+    _base_properties = {
         **ASFProduct._properties_paths,
         'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
         'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},

diff --git a/asf_search/Products/UAVSARProduct.py b/asf_search/Products/UAVSARProduct.py
index f33a39dc..280cf719 100644
--- a/asf_search/Products/UAVSARProduct.py
+++ b/asf_search/Products/UAVSARProduct.py
@@ -7,7 +7,7 @@ class UAVSARProduct(ASFProduct):
     """
     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/uavsar/
     """
-    _properties_paths = {
+    _base_properties = {
         **ASFProduct._properties_paths,
         'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
         'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},

diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py
index c8cfc926..9ed47163 100644
--- a/asf_search/search/baseline_search.py
+++ b/asf_search/search/baseline_search.py
@@ -100,7 +100,7 @@ def _cast_to_subclass(product: ASFProduct, subclass: Type[ASFProduct]) -> ASFPro
     example:
     ```
     class MyCustomClass(ASFProduct):
-        _properties_paths = {
+        _base_properties = {
             **ASFProduct._properties_paths,
             'some_unique_property': {'path': ['AdditionalAttributes', 'UNIQUE_PROPERTY', ...]}
         }

diff --git a/examples/5-Download.ipynb b/examples/5-Download.ipynb
index 058b068c..43fb651e 100644
--- a/examples/5-Download.ipynb
+++ b/examples/5-Download.ipynb
@@ -20,7 +20,7 @@
    },
    {
     "cell_type": "code",
-    "execution_count": null,
+    "execution_count": 2,
     "id": "db06fa80-4ac3-40b5-9787-256b422d49e6",
     "metadata": {
      "tags": []
@@ -52,7 +52,7 @@
    },
    {
     "cell_type": "code",
-    "execution_count": null,
+    "execution_count": 1,
     "id": "38a2c671-0789-4e7c-b758-5b48745b2877",
     "metadata": {},
     "outputs": [],
@@ -322,6 +322,52 @@
     "listdir('./downloads3')"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "id": "30760f6f",
+   "metadata": {},
+   "source": [
+    "***\n",
+    "## S3 URIs\n",
+    "Some products have S3 URIs available (SENTINEL-1, OPERA, and NISAR)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "dd4a81ed",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "['s3://asf-cumulus-prod-opera-browse/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1_BROWSE.png.md5',\n",
+       " 's3://asf-cumulus-prod-opera-browse/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1_BROWSE_low-res.png.md5',\n",
+       " 's3://asf-cumulus-prod-opera-browse/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1_BROWSE_thumbnail.png.md5',\n",
+       " 's3://asf-cumulus-prod-opera-products/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1.h5',\n",
+       " 's3://asf-cumulus-prod-opera-products/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1.h5.md5',\n",
+       " 's3://asf-cumulus-prod-opera-products/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1.iso.xml',\n",
+       " 's3://asf-cumulus-prod-opera-products/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1.iso.xml.md5']"
+      ]
+     },
+     "execution_count": 2,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "opera_product = asf.search(dataset=asf.DATASET.OPERA_S1, maxResults=1)[0]\n",
+    "opera_product.properties['s3Urls']"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "159b5eb8",
+   "metadata": {},
+   "source": [
+    "From there authorized users can use their preferred method for authentication and downloading s3 objects."
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "8aed3b7c-a557-4cbb-878e-aa9fe8330646",
@@ -394,7 +440,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.9.7"
+   "version": "3.11.5"
   },
   "toc-autonumbering": false,
   "toc-showtags": false
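The notebook stops at listing the S3 URIs. One hedged sketch of a follow-on download, assuming the reader already holds AWS credentials authorized for ASF's buckets (boto3 and the bucket/key split are illustrative, not part of asf_search):

```python
import boto3

# Example value taken from the notebook output above.
s3_uri = (
    's3://asf-cumulus-prod-opera-products/OPERA_L2_CSLC-S1/'
    'OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/'
    'OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1.h5'
)

# Split 's3://bucket/key' into its bucket and key parts.
bucket, key = s3_uri.removeprefix('s3://').split('/', 1)

# Requires credentials permitted to read the bucket (for example, temporary
# credentials from an Earthdata s3credentials endpoint); anonymous access is denied.
boto3.client('s3').download_file(bucket, key, key.rsplit('/', 1)[-1])
```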
From 9f9e30054f56c15704bfaa30d6377394bd810a77 Mon Sep 17 00:00:00 2001
From: kim
Date: Fri, 2 Aug 2024 11:57:46 -0800
Subject: [PATCH 33/40] fixes indentation on _base_properties

---
 asf_search/ASFProduct.py                 | 6 +++---
 asf_search/Products/AIRSARProduct.py     | 2 +-
 asf_search/Products/ALOSProduct.py       | 2 +-
 asf_search/Products/ARIAS1GUNWProduct.py | 2 +-
 asf_search/Products/ERSProduct.py        | 2 +-
 asf_search/Products/JERSProduct.py       | 2 +-
 asf_search/Products/NISARProduct.py      | 2 +-
 asf_search/Products/OPERAS1Product.py    | 2 +-
 asf_search/Products/RADARSATProduct.py   | 2 +-
 asf_search/Products/S1BurstProduct.py    | 2 +-
 asf_search/Products/S1Product.py         | 2 +-
 asf_search/Products/SEASATProduct.py     | 2 +-
 asf_search/Products/SIRCProduct.py       | 2 +-
 asf_search/Products/SMAPProduct.py       | 2 +-
 asf_search/Products/UAVSARProduct.py     | 2 +-
 asf_search/search/baseline_search.py     | 2 +-
 16 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py
index 305352c5..97df8e48 100644
--- a/asf_search/ASFProduct.py
+++ b/asf_search/ASFProduct.py
@@ -41,7 +41,7 @@ class ASFProduct:
     def get_classname(cls):
         return cls.__name__

-    _base_properties = {
+    _base_properties = {
         # min viable product
         'centerLat': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0], 'cast': try_parse_float},
         'centerLon': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0], 'cast': try_parse_float},
@@ -67,7 +67,7 @@ def get_classname(cls):
         'sensor': {'path': [ 'Platforms', 0, 'Instruments', 0, 'ShortName'], },
     }
     """
-    _base_properties dictionary, mapping readable property names to paths and optional type casting
+    _base_properties dictionary, mapping readable property names to paths and optional type casting

     entries are organized as such:
         - `PROPERTY_NAME`: The name the property should be called in `ASFProduct.properties`
@@ -169,7 +169,7 @@ def stack(
         :param opts: An ASFSearchOptions object describing the search parameters to be used. Search parameters specified outside this object will override in event of a conflict.
         :param ASFProductSubclass: An ASFProduct subclass constructor.
-
+
         :return: ASFSearchResults containing the stack, with the addition of baseline values (temporal, perpendicular) attached to each ASFProduct.
         """
         from .search.baseline_search import stack_from_product
""" from .search.baseline_search import stack_from_product diff --git a/asf_search/Products/AIRSARProduct.py b/asf_search/Products/AIRSARProduct.py index 0f5e2902..4da1ab0f 100644 --- a/asf_search/Products/AIRSARProduct.py +++ b/asf_search/Products/AIRSARProduct.py @@ -7,7 +7,7 @@ class AIRSARProduct(ASFProduct): """ ASF Dataset Overview Page: https://asf.alaska.edu/data-sets/sar-data-sets/airsar/ """ - _base_properties = { + _base_properties = { **ASFProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py index 7052186f..47748a79 100644 --- a/asf_search/Products/ALOSProduct.py +++ b/asf_search/Products/ALOSProduct.py @@ -10,7 +10,7 @@ class ALOSProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/ """ - _base_properties = { + _base_properties = { **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, diff --git a/asf_search/Products/ARIAS1GUNWProduct.py b/asf_search/Products/ARIAS1GUNWProduct.py index f75ddb9e..bbd84146 100644 --- a/asf_search/Products/ARIAS1GUNWProduct.py +++ b/asf_search/Products/ARIAS1GUNWProduct.py @@ -12,7 +12,7 @@ class ARIAS1GUNWProduct(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/derived-data-sets/sentinel-1-interferograms/ """ - _base_properties = { + _base_properties = { **S1Product._properties_paths, 'perpendicularBaseline': {'path': ['AdditionalAttributes', ('Name', 'PERPENDICULAR_BASELINE'), 'Values', 0], 'cast': try_parse_float}, 'orbit': {'path': ['OrbitCalculatedSpatialDomains']}, diff --git a/asf_search/Products/ERSProduct.py b/asf_search/Products/ERSProduct.py index 73aa447e..2e0a54de 100644 --- a/asf_search/Products/ERSProduct.py +++ b/asf_search/Products/ERSProduct.py @@ -11,7 +11,7 @@ class ERSProduct(ASFStackableProduct): ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/ ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/ """ - _base_properties = { + _base_properties = { **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]}, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, diff --git a/asf_search/Products/JERSProduct.py b/asf_search/Products/JERSProduct.py index 5ea10e19..153fcda4 100644 --- a/asf_search/Products/JERSProduct.py +++ b/asf_search/Products/JERSProduct.py @@ -7,7 +7,7 @@ class JERSProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/jers-1/ """ - _base_properties = { + _base_properties = { **ASFStackableProduct._properties_paths, 'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]}, 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, diff --git a/asf_search/Products/NISARProduct.py b/asf_search/Products/NISARProduct.py index 30fc97b0..c409cb67 100644 --- a/asf_search/Products/NISARProduct.py +++ b/asf_search/Products/NISARProduct.py @@ -10,7 +10,7 @@ class 
diff --git a/asf_search/Products/NISARProduct.py b/asf_search/Products/NISARProduct.py
index 30fc97b0..c409cb67 100644
--- a/asf_search/Products/NISARProduct.py
+++ b/asf_search/Products/NISARProduct.py
@@ -10,7 +10,7 @@ class NISARProduct(ASFStackableProduct):
    ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/
    """

-    _base_properties = {
+    _base_properties = {
        **ASFStackableProduct._properties_paths,
        'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']}
    }

diff --git a/asf_search/Products/OPERAS1Product.py b/asf_search/Products/OPERAS1Product.py
index 91b24a78..7b03f095 100644
--- a/asf_search/Products/OPERAS1Product.py
+++ b/asf_search/Products/OPERAS1Product.py
@@ -8,7 +8,7 @@ class OPERAS1Product(S1Product):
    """
    ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/opera/
    """
-    _base_properties = {
+    _base_properties = {
        **S1Product._properties_paths,
        'centerLat': {'path': []},  # Opera products lacks these fields
        'centerLon': {'path': []},

diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py
index 817465c4..2edeec41 100644
--- a/asf_search/Products/RADARSATProduct.py
+++ b/asf_search/Products/RADARSATProduct.py
@@ -8,7 +8,7 @@ class RADARSATProduct(ASFStackableProduct):
    """
    ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/radarsat-1/
    """
-    _base_properties = {
+    _base_properties = {
        **ASFStackableProduct._properties_paths,
        'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float},
        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},

diff --git a/asf_search/Products/S1BurstProduct.py b/asf_search/Products/S1BurstProduct.py
index 44462544..671ca6c9 100644
--- a/asf_search/Products/S1BurstProduct.py
+++ b/asf_search/Products/S1BurstProduct.py
@@ -17,7 +17,7 @@ class S1BurstProduct(S1Product):

    ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/
    """
-    _base_properties = {
+    _base_properties = {
        **S1Product._properties_paths,
        'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]},
        'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int},

diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py
index 16b24d7c..d400444b 100644
--- a/asf_search/Products/S1Product.py
+++ b/asf_search/Products/S1Product.py
@@ -15,7 +15,7 @@ class S1Product(ASFStackableProduct):
    ASF Dataset Overview Page: https://asf.alaska.edu/datasets/daac/sentinel-1/
    """

-    _base_properties = {
+    _base_properties = {
        **ASFStackableProduct._properties_paths,
        'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int},  #Sentinel and ALOS product alt for frameNumber (ESA_FRAME)
        'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},

diff --git a/asf_search/Products/SEASATProduct.py b/asf_search/Products/SEASATProduct.py
index 90c760e8..eae227f5 100644
--- a/asf_search/Products/SEASATProduct.py
+++ b/asf_search/Products/SEASATProduct.py
@@ -7,7 +7,7 @@ class SEASATProduct(ASFProduct):
    """
    ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/seasat/
    """
-    _base_properties = {
+    _base_properties = {
        **ASFProduct._properties_paths,
        'bytes': {'path': [ 'AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float},
        'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
diff --git a/asf_search/Products/SIRCProduct.py b/asf_search/Products/SIRCProduct.py
index bdd41821..77de2c82 100644
--- a/asf_search/Products/SIRCProduct.py
+++ b/asf_search/Products/SIRCProduct.py
@@ -5,7 +5,7 @@ class SIRCProduct(ASFProduct):
    """
    Dataset Documentation Page: https://eospso.nasa.gov/missions/spaceborne-imaging-radar-c
    """
-    _base_properties = {
+    _base_properties = {
        **ASFProduct._properties_paths,
        'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
        'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},

diff --git a/asf_search/Products/SMAPProduct.py b/asf_search/Products/SMAPProduct.py
index 1e4834dd..b47a8c81 100644
--- a/asf_search/Products/SMAPProduct.py
+++ b/asf_search/Products/SMAPProduct.py
@@ -7,7 +7,7 @@ class SMAPProduct(ASFProduct):
    """
    ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/
    """
-    _base_properties = {
+    _base_properties = {
        **ASFProduct._properties_paths,
        'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
        'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},

diff --git a/asf_search/Products/UAVSARProduct.py b/asf_search/Products/UAVSARProduct.py
index 280cf719..a335d1a9 100644
--- a/asf_search/Products/UAVSARProduct.py
+++ b/asf_search/Products/UAVSARProduct.py
@@ -7,7 +7,7 @@ class UAVSARProduct(ASFProduct):
    """
    ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/uavsar/
    """
-    _base_properties = {
+    _base_properties = {
        **ASFProduct._properties_paths,
        'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
        'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},

diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py
index 9ed47163..b48751ab 100644
--- a/asf_search/search/baseline_search.py
+++ b/asf_search/search/baseline_search.py
@@ -100,7 +100,7 @@ def _cast_to_subclass(product: ASFProduct, subclass: Type[ASFProduct]) -> ASFPro
    example:
    ```
    class MyCustomClass(ASFProduct):
-        _base_properties = {
+        _base_properties = {
            **ASFProduct._properties_paths,
            'some_unique_property': {'path': ['AdditionalAttributes', 'UNIQUE_PROPERTY', ...]}
        }
From 76769494ce04cd9955cac5ce28a5fa3741d59ebb Mon Sep 17 00:00:00 2001
From: kim
Date: Fri, 2 Aug 2024 12:00:03 -0800
Subject: [PATCH 34/40] fixes old reference to properties paths

---
 asf_search/ASFProduct.py                 | 2 +-
 asf_search/Products/AIRSARProduct.py     | 2 +-
 asf_search/Products/ALOSProduct.py       | 2 +-
 asf_search/Products/ARIAS1GUNWProduct.py | 2 +-
 asf_search/Products/ERSProduct.py        | 2 +-
 asf_search/Products/JERSProduct.py       | 2 +-
 asf_search/Products/NISARProduct.py      | 2 +-
 asf_search/Products/OPERAS1Product.py    | 2 +-
 asf_search/Products/RADARSATProduct.py   | 2 +-
 asf_search/Products/S1BurstProduct.py    | 2 +-
 asf_search/Products/S1Product.py         | 2 +-
 asf_search/Products/SEASATProduct.py     | 2 +-
 asf_search/Products/SIRCProduct.py       | 2 +-
 asf_search/Products/SMAPProduct.py       | 2 +-
 asf_search/Products/UAVSARProduct.py     | 2 +-
 asf_search/search/baseline_search.py     | 2 +-
 16 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py
index 97df8e48..230d90f6 100644
--- a/asf_search/ASFProduct.py
+++ b/asf_search/ASFProduct.py
@@ -265,7 +265,7 @@ def translate_product(self, item: Dict) -> Dict:

         properties = {
             prop: self._read_umm_property(umm, umm_mapping)
-            for prop, umm_mapping in self._properties_paths.items()
+            for prop, umm_mapping in self._base_properties.items()
         }

         if properties.get('url') is not None:

diff --git a/asf_search/Products/AIRSARProduct.py b/asf_search/Products/AIRSARProduct.py
index 4da1ab0f..6c8bc914 100644
--- a/asf_search/Products/AIRSARProduct.py
+++ b/asf_search/Products/AIRSARProduct.py
@@ -8,7 +8,7 @@ class AIRSARProduct(ASFProduct):
     ASF Dataset Overview Page: https://asf.alaska.edu/data-sets/sar-data-sets/airsar/
     """
     _base_properties = {
-        **ASFProduct._properties_paths,
+        **ASFProduct._base_properties,
         'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int},
         'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
         'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},

diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py
index 47748a79..92df7819 100644
--- a/asf_search/Products/ALOSProduct.py
+++ b/asf_search/Products/ALOSProduct.py
@@ -11,7 +11,7 @@ class ALOSProduct(ASFStackableProduct):
     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/
     """
     _base_properties = {
-        **ASFStackableProduct._properties_paths,
+        **ASFStackableProduct._base_properties,
         'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int},
         'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float},
         'offNadirAngle': {'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0], 'cast': try_parse_float},

diff --git a/asf_search/Products/ARIAS1GUNWProduct.py b/asf_search/Products/ARIAS1GUNWProduct.py
index bbd84146..91a87c95 100644
--- a/asf_search/Products/ARIAS1GUNWProduct.py
+++ b/asf_search/Products/ARIAS1GUNWProduct.py
@@ -13,7 +13,7 @@ class ARIAS1GUNWProduct(S1Product):
     ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/derived-data-sets/sentinel-1-interferograms/
     """
     _base_properties = {
-        **S1Product._properties_paths,
+        **S1Product._base_properties,
         'perpendicularBaseline': {'path': ['AdditionalAttributes', ('Name', 'PERPENDICULAR_BASELINE'), 'Values', 0], 'cast': try_parse_float},
         'orbit': {'path': ['OrbitCalculatedSpatialDomains']},
         'inputGranules': {'path': ['InputGranules']},

diff --git a/asf_search/Products/ERSProduct.py b/asf_search/Products/ERSProduct.py
index 2e0a54de..8b6961aa 100644
--- a/asf_search/Products/ERSProduct.py
+++ b/asf_search/Products/ERSProduct.py
@@ -12,7 +12,7 @@ class ERSProduct(ASFStackableProduct):
     ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/
     """
     _base_properties = {
-        **ASFStackableProduct._properties_paths,
+        **ASFStackableProduct._base_properties,
         'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]},
         'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float},
         'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0]},

diff --git a/asf_search/Products/JERSProduct.py b/asf_search/Products/JERSProduct.py
index 153fcda4..a70e1050 100644
--- a/asf_search/Products/JERSProduct.py
+++ b/asf_search/Products/JERSProduct.py
@@ -8,7 +8,7 @@ class JERSProduct(ASFStackableProduct):
     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/jers-1/
     """
     _base_properties = {
-        **ASFStackableProduct._properties_paths,
+        **ASFStackableProduct._base_properties,
         'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]},
         'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
         'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
diff --git a/asf_search/Products/NISARProduct.py b/asf_search/Products/NISARProduct.py
index c409cb67..e66ad77d 100644
--- a/asf_search/Products/NISARProduct.py
+++ b/asf_search/Products/NISARProduct.py
@@ -11,7 +11,7 @@ class NISARProduct(ASFStackableProduct):
     ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/
     """
     _base_properties = {
-        **ASFStackableProduct._properties_paths,
+        **ASFStackableProduct._base_properties,
         'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']}
     }

diff --git a/asf_search/Products/OPERAS1Product.py b/asf_search/Products/OPERAS1Product.py
index 7b03f095..9ee2b45e 100644
--- a/asf_search/Products/OPERAS1Product.py
+++ b/asf_search/Products/OPERAS1Product.py
@@ -9,7 +9,7 @@ class OPERAS1Product(S1Product):
     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/opera/
     """
     _base_properties = {
-        **S1Product._properties_paths,
+        **S1Product._base_properties,
         'centerLat': {'path': []},  # Opera products lacks these fields
         'centerLon': {'path': []},
         'frameNumber': {'path': []},

diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py
index 2edeec41..8dba91e8 100644
--- a/asf_search/Products/RADARSATProduct.py
+++ b/asf_search/Products/RADARSATProduct.py
@@ -9,7 +9,7 @@ class RADARSATProduct(ASFStackableProduct):
     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/radarsat-1/
     """
     _base_properties = {
-        **ASFStackableProduct._properties_paths,
+        **ASFStackableProduct._base_properties,
         'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float},
         'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
         'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]},

diff --git a/asf_search/Products/S1BurstProduct.py b/asf_search/Products/S1BurstProduct.py
index 671ca6c9..986a800a 100644
--- a/asf_search/Products/S1BurstProduct.py
+++ b/asf_search/Products/S1BurstProduct.py
@@ -18,7 +18,7 @@ class S1BurstProduct(S1Product):
     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/
     """
     _base_properties = {
-        **S1Product._properties_paths,
+        **S1Product._base_properties,
         'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]},
         'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int},
         'relativeBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_RELATIVE'), 'Values', 0], 'cast': try_parse_int},

diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py
index d400444b..6165a4cc 100644
--- a/asf_search/Products/S1Product.py
+++ b/asf_search/Products/S1Product.py
@@ -16,7 +16,7 @@ class S1Product(ASFStackableProduct):
     """

     _base_properties = {
-        **ASFStackableProduct._properties_paths,
+        **ASFStackableProduct._base_properties,
         'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int},  #Sentinel and ALOS product alt for frameNumber (ESA_FRAME)
         'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
         'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
diff --git a/asf_search/Products/SEASATProduct.py b/asf_search/Products/SEASATProduct.py
index eae227f5..6cbe3479 100644
--- a/asf_search/Products/SEASATProduct.py
+++ b/asf_search/Products/SEASATProduct.py
@@ -8,7 +8,7 @@ class SEASATProduct(ASFProduct):
     ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/seasat/
     """
     _base_properties = {
-        **ASFProduct._properties_paths,
+        **ASFProduct._base_properties,
         'bytes': {'path': [ 'AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float},
         'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
         'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},

diff --git a/asf_search/Products/SIRCProduct.py b/asf_search/Products/SIRCProduct.py
index 77de2c82..812c2bfa 100644
--- a/asf_search/Products/SIRCProduct.py
+++ b/asf_search/Products/SIRCProduct.py
@@ -6,7 +6,7 @@ class SIRCProduct(ASFProduct):
     Dataset Documentation Page: https://eospso.nasa.gov/missions/spaceborne-imaging-radar-c
     """
     _base_properties = {
-        **ASFProduct._properties_paths,
+        **ASFProduct._base_properties,
         'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
         'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
         'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion'] },

diff --git a/asf_search/Products/SMAPProduct.py b/asf_search/Products/SMAPProduct.py
index b47a8c81..d852c7f8 100644
--- a/asf_search/Products/SMAPProduct.py
+++ b/asf_search/Products/SMAPProduct.py
@@ -8,7 +8,7 @@ class SMAPProduct(ASFProduct):
     ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/
     """
     _base_properties = {
-        **ASFProduct._properties_paths,
+        **ASFProduct._base_properties,
         'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
         'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
         'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},

diff --git a/asf_search/Products/UAVSARProduct.py b/asf_search/Products/UAVSARProduct.py
index a335d1a9..edf35f29 100644
--- a/asf_search/Products/UAVSARProduct.py
+++ b/asf_search/Products/UAVSARProduct.py
@@ -8,7 +8,7 @@ class UAVSARProduct(ASFProduct):
     ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/uavsar/
     """
     _base_properties = {
-        **ASFProduct._properties_paths,
+        **ASFProduct._base_properties,
         'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
         'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
         'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},

diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py
index b48751ab..b50b15ae 100644
--- a/asf_search/search/baseline_search.py
+++ b/asf_search/search/baseline_search.py
@@ -101,7 +101,7 @@ def _cast_to_subclass(product: ASFProduct, subclass: Type[ASFProduct]) -> ASFPro
     ```
     class MyCustomClass(ASFProduct):
         _base_properties = {
-            **ASFProduct._properties_paths,
+            **ASFProduct._base_properties,
             'some_unique_property': {'path': ['AdditionalAttributes', 'UNIQUE_PROPERTY', ...]}
         }
From 651f4b461738943002693558cb9d9636632d5a6d Mon Sep 17 00:00:00 2001
From: kim
Date: Fri, 2 Aug 2024 12:19:54 -0800
Subject: [PATCH 35/40] removes PR trigger for pytest workflow, update example

---
 .github/workflows/run-pytest.yml              |  2 +-
 ...vanced-Custom-ASFProduct-Subclassing.ipynb | 21 ++++++-------------
 2 files changed, 7 insertions(+), 16 deletions(-)

diff --git a/.github/workflows/run-pytest.yml b/.github/workflows/run-pytest.yml
index 855336a6..56d759fb 100644
--- a/.github/workflows/run-pytest.yml
+++ b/.github/workflows/run-pytest.yml
@@ -1,6 +1,6 @@
 name: tests

-on: [pull_request, push]
+on: [push]

 jobs:
   run-tests:

diff --git a/examples/Advanced-Custom-ASFProduct-Subclassing.ipynb b/examples/Advanced-Custom-ASFProduct-Subclassing.ipynb
index 804f1667..39cec733 100644
--- a/examples/Advanced-Custom-ASFProduct-Subclassing.ipynb
+++ b/examples/Advanced-Custom-ASFProduct-Subclassing.ipynb
@@ -15,14 +15,13 @@
    "- `get_stack_opts()` (returns None in `ASFProduct`, implemented by `ASFStackableProduct` subclass and its subclasses)\n",
    "- `centroid()`\n",
    "- `remotezip()` (requires asf-search's optional dependency be installed)\n",
-    "- `get_property_paths()` (gets product's keywords and their paths in umm dictionary)\n",
    "- `translate_product()` (reads properties from umm, populates `properties` with associated keyword)\n",
    "- `get_sort_keys()`\n",
    "- `umm_get()`\n",
    "\n",
    "Key Properties:\n",
    "- `properties`\n",
-    "- `_base_properties` (What `get_property_paths()` uses to find values in umm json `properties`)\n",
+    "- `_base_properties` (maps `properties` keys to values in umm json)\n",
    "- `umm` (The product's umm JSON from CMR)\n",
    "- `metadata` (The product's metadata JSON from CMR)"
   ]
@@ -196,12 +195,13 @@
    "    self.timestamp = datetime.now()\n",
    "\n",
    "    # _base_properties is a special dict of ASFProduct that maps keywords to granule UMM json\n",
-    "    # defining properties and their paths here in conjunction with `get_property_paths()` \n",
-    "    # will let you easily access them in the product's `properties` dictionary\n",
+    "    # defining properties and their paths here will let you\n",
+    "    # easily access them in the product's `properties` dictionary\n",
    "    # see `ASFProduct.umm_get()` for explanation of pathing\n",
    "    _base_properties = {\n",
    "        # Most product types use `CENTER_ESA_FRAME` as the value for `frameNumber` (unlike S1 and ALOS, which use `FRAME_NUMBER`), \n",
    "        # this creates a new `esaFrame` property so we have that value too\n",
+    "        **asf.S1Product._base_properties,\n",
    "        'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME)\n",
    "    }\n",
@@ -234,16 +234,7 @@
    "    output['properties']['timestamp'] = str(self.timestamp)\n",
    "    output['properties']['ASFSearchVersion'] = asf.__version__\n",
    "    return output\n",
-    "    \n",
-    "    # This method is used internally by `ASFProduct.translate_product()` \n",
-    "    # to traverse the granule UMM for each property's corresponding values\n",
-    "    @staticmethod\n",
-    "    def get_property_paths() -> dict:\n",
-    "        return {\n",
-    "            **asf.S1Product.get_property_paths(),\n",
-    "            **MyCustomS1Subclass._base_properties\n",
-    "        }\n",
-    "    \n",
+    "\n",
    "    # ASFProduct.stack() normally stacks the current product\n",
    "    # in this version we search for every SLC-BURST product that\n",
    "    # overlaps the given area with the same source scene, \n",
@@ -367,7 +358,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.12"
+   "version": "3.11.5"
   }
  },
 "nbformat": 4,
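The notebook change above reduces subclassing to a single dict merge: copy the parent's `_base_properties`, then layer new entries on top, and `translate_product()` picks them up automatically. A self-contained sketch of that shape (the stub parent stands in for `asf.S1Product` so the snippet runs without a network call; the `esaFrame` path is the one shown in the notebook):

```python
from typing import Any, Dict


class StubS1Product:
    # Stand-in for asf.S1Product; the real _base_properties maps many more keys.
    _base_properties: Dict[str, Any] = {
        'sceneName': {'path': ['DataGranule', 'Identifiers', 0, 'Identifier']},
    }


class MyCustomS1Subclass(StubS1Product):
    # Copy the parent mapping, then add an `esaFrame` property on top of it.
    _base_properties = {
        **StubS1Product._base_properties,
        'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0]},
    }


print(sorted(MyCustomS1Subclass._base_properties))  # ['esaFrame', 'sceneName']
```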
From 21462d2c365fef33c29a6b7dcb9d2058a058cfd8 Mon Sep 17 00:00:00 2001
From: kim
Date: Fri, 2 Aug 2024 14:15:28 -0800
Subject: [PATCH 36/40] updates tests formatting, remainder merge conflicts

---
 .github/workflows/lint.yml                      |   13 +-
 asf_search/ASFProduct.py                        |  212 +-
 asf_search/ASFSearchOptions/__init__.py         |    2 +-
 asf_search/ASFSearchOptions/config.py           |    2 +-
 asf_search/ASFSearchOptions/validator_map.py    |   94 +-
 asf_search/ASFSearchOptions/validators.py       |   80 +-
 asf_search/ASFSearchResults.py                  |   12 +-
 asf_search/ASFSession.py                        |   87 +-
 asf_search/ASFStackableProduct.py               |   16 +-
 asf_search/CMR/MissionList.py                   |    6 +-
 asf_search/CMR/datasets.py                      | 2630 ++++++++---------
 asf_search/CMR/field_map.py                     |   89 +-
 asf_search/CMR/subquery.py                      |   58 +-
 asf_search/CMR/translate.py                     |  108 +-
 asf_search/Products/AIRSARProduct.py            |   11 +-
 asf_search/Products/ALOSProduct.py              |   24 +-
 asf_search/Products/ARIAS1GUNWProduct.py        |   44 +-
 asf_search/Products/ERSProduct.py               |    5 +-
 asf_search/Products/NISARProduct.py             |   15 +-
 asf_search/Products/OPERAS1Product.py           |  125 +-
 asf_search/Products/RADARSATProduct.py          |   29 -
 asf_search/Products/S1BurstProduct.py           |   57 -
 asf_search/Products/S1Product.py                |   60 +-
 asf_search/Products/SEASATProduct.py            |   27 +-
 asf_search/Products/SIRCProduct.py              |   24 +-
 asf_search/Products/SMAPProduct.py              |   23 +-
 asf_search/Products/UAVSARProduct.py            |    6 +-
 asf_search/WKT/RepairEntry.py                   |    2 +-
 asf_search/WKT/validate_wkt.py                  |   52 +-
 asf_search/__init__.py                          |   19 -
 asf_search/baseline/calc.py                     |    4 -
 asf_search/baseline/stack.py                    |   10 +-
 asf_search/constants/BEAMMODE.py                |   94 +-
 asf_search/constants/DATASET.py                 |   32 +-
 asf_search/constants/FLIGHT_DIRECTION.py        |    4 +-
 asf_search/constants/INSTRUMENT.py              |    6 +-
 asf_search/constants/PLATFORM.py                |   30 +-
 asf_search/constants/POLARIZATION.py            |   32 +-
 asf_search/constants/PRODUCT_TYPE.py            |  138 +-
 asf_search/constants/__init__.py                |    2 +-
 asf_search/download/download.py                 |   34 +-
 asf_search/exceptions.py                        |    1 +
 asf_search/export/csv.py                        |   18 -
 asf_search/export/export_translators.py         |   20 +-
 asf_search/export/geojson.py                    |   14 +-
 asf_search/export/jsonlite.py                   |   23 +-
 asf_search/export/jsonlite2.py                  |   20 -
 asf_search/export/kml.py                        |    9 -
 asf_search/export/metalink.py                   |   44 +-
 asf_search/health/health.py                     |    4 +-
 asf_search/search/baseline_search.py            |   18 +-
 asf_search/search/campaigns.py                  |   22 +-
 asf_search/search/error_reporting.py            |   21 +-
 asf_search/search/geo_search.py                 |    8 +-
 asf_search/search/granule_search.py             |    4 +-
 asf_search/search/product_search.py             |    4 +-
 asf_search/search/search.py                     |   14 +-
 asf_search/search/search_count.py               |   22 +-
 asf_search/search/search_generator.py           |  219 +-
 examples/hello_world.py                         |   49 +-
 pyproject.toml                                  |    8 +
 setup.py                                        |   93 +-
 tests/ASFProduct/test_ASFProduct.py             |   62 +-
 tests/ASFSearchOptions/test_ASFSearchOptions.py |   54 +-
 tests/ASFSearchResults/test_ASFSearchResults.py |  118 +-
 tests/ASFSession/test_ASFSession.py             |   91 +-
 tests/BaselineSearch/Stack/test_stack.py        |   18 +-
 tests/BaselineSearch/test_baseline_search.py    |   93 +-
 tests/CMR/test_MissionList.py                   |   19 +-
 tests/Search/test_search.py                     |  108 +-
 tests/Search/test_search_generator.py           |   24 +-
 tests/Serialization/test_serialization.py       |    7 +-
 tests/WKT/test_validate_wkt.py                  |   41 +-
 tests/download/test_download.py                 |   18 +-
 tests/pytest-managers.py                        |  292 +-
 75 files changed, 2906 insertions(+), 2992 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 48b80ac4..d7dd83ff 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -1,9 +1,4 @@
-name: Lint
-
-on:
-  pull_request:
-    types: [opened, edited, reopened]
-  push:
+on: push

 jobs:
   lint:
@@ -11,8 +6,6 @@ jobs:
     steps:
     - uses: actions/checkout@v4
-    - uses: TrueBrain/actions-flake8@v2
+    - uses: chartboost/ruff-action@v1
       with:
-        flake8_version: 6.0.0
-        path: asf_search
-        max_line_length: 100
+        src: asf_search
diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py
index 3bb3652d..b12e31ae 100644
--- a/asf_search/ASFProduct.py
+++ b/asf_search/ASFProduct.py
@@ -46,79 +46,71 @@ def get_classname(cls):

     _base_properties = {
         # min viable product
-        "centerLat": {
-            "path": ["AdditionalAttributes", ("Name", "CENTER_LAT"), "Values", 0],
-            "cast": try_parse_float,
+        'centerLat': {
+            'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0],
+            'cast': try_parse_float,
         },
-        "centerLon": {
-            "path": ["AdditionalAttributes", ("Name", "CENTER_LON"), "Values", 0],
-            "cast": try_parse_float,
+        'centerLon': {
+            'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0],
+            'cast': try_parse_float,
         },
-        "stopTime": {
-            "path": ["TemporalExtent", "RangeDateTime", "EndingDateTime"],
-            "cast": try_parse_date,
+        'stopTime': {
+            'path': ['TemporalExtent', 'RangeDateTime', 'EndingDateTime'],
+            'cast': try_parse_date,
         },  # primary search results sort key
-        "fileID": {"path": ["GranuleUR"]},  # secondary search results sort key
-        "flightDirection": {
-            "path": [
-                "AdditionalAttributes",
-                ("Name", "ASCENDING_DESCENDING"),
-                "Values",
+        'fileID': {'path': ['GranuleUR']},  # secondary search results sort key
+        'flightDirection': {
+            'path': [
+                'AdditionalAttributes',
+                ('Name', 'ASCENDING_DESCENDING'),
+                'Values',
                 0,
             ]
         },
-        "pathNumber": {
-            "path": ["AdditionalAttributes", ("Name", "PATH_NUMBER"), "Values", 0],
-            "cast": try_parse_int,
+        'pathNumber': {
+            'path': ['AdditionalAttributes', ('Name', 'PATH_NUMBER'), 'Values', 0],
+            'cast': try_parse_int,
         },
-        "processingLevel": {
-            "path": ["AdditionalAttributes", ("Name", "PROCESSING_TYPE"), "Values", 0]
+        'processingLevel': {
+            'path': ['AdditionalAttributes', ('Name', 'PROCESSING_TYPE'), 'Values', 0]
         },  # commonly used
-        "url": {"path": ["RelatedUrls", ("Type", "GET DATA"), "URL"]},
-        "startTime": {
-            "path": ["TemporalExtent", "RangeDateTime", "BeginningDateTime"],
-            "cast": try_parse_date,
+        'url': {'path': ['RelatedUrls', ('Type', 'GET DATA'), 'URL']},
+        'startTime': {
+            'path': ['TemporalExtent', 'RangeDateTime', 'BeginningDateTime'],
+            'cast': try_parse_date,
         },
-        "sceneName": {
-            "path": [
-                "DataGranule",
-                "Identifiers",
-                ("IdentifierType", "ProducerGranuleId"),
-                "Identifier",
+        'sceneName': {
+            'path': [
+                'DataGranule',
+                'Identifiers',
+                ('IdentifierType', 'ProducerGranuleId'),
+                'Identifier',
             ]
         },
-        "browse": {
-            "path": ["RelatedUrls", ("Type", [("GET RELATED VISUALIZATION", "URL")])]
+        'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]},
+        'platform': {'path': ['AdditionalAttributes', ('Name', 'ASF_PLATFORM'), 'Values', 0]},
+        'bytes': {
+            'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0],
+            'cast': try_round_float,
         },
-        "platform": {
-            "path": ["AdditionalAttributes", ("Name", "ASF_PLATFORM"), "Values", 0]
-        },
-        "bytes": {
-            "path": ["AdditionalAttributes", ("Name", "BYTES"), "Values", 0],
-            "cast": try_round_float,
-        },
-        "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]},
-        "frameNumber": {
-            "path": ["AdditionalAttributes", ("Name", "CENTER_ESA_FRAME"), "Values", 0],
-            "cast": try_parse_int,
+        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
+        'frameNumber': {
+            'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0],
+            'cast': try_parse_int,
         },  # overloaded by S1, ALOS, and ERS
-        "granuleType": {
-            "path": ["AdditionalAttributes", ("Name", "GRANULE_TYPE"), "Values", 0]
-        },
-        "orbit": {
-            "path": ["OrbitCalculatedSpatialDomains", 0, "OrbitNumber"],
-            "cast": try_parse_int,
try_parse_int, + 'granuleType': {'path': ['AdditionalAttributes', ('Name', 'GRANULE_TYPE'), 'Values', 0]}, + 'orbit': { + 'path': ['OrbitCalculatedSpatialDomains', 0, 'OrbitNumber'], + 'cast': try_parse_int, }, - "polarization": { - "path": ["AdditionalAttributes", ("Name", "POLARIZATION"), "Values", 0] + 'polarization': {'path': ['AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values', 0]}, + 'processingDate': { + 'path': ['DataGranule', 'ProductionDateTime'], + 'cast': try_parse_date, }, - "processingDate": { - "path": ["DataGranule", "ProductionDateTime"], - "cast": try_parse_date, - }, - "sensor": { - "path": ["Platforms", 0, "Instruments", 0, "ShortName"], + 'sensor': { + 'path': ['Platforms', 0, 'Instruments', 0, 'ShortName'], }, } """ @@ -140,13 +132,13 @@ def get_classname(cls): """ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): - self.meta = args.get("meta") - self.umm = args.get("umm") + self.meta = args.get('meta') + self.umm = args.get('umm') translated = self.translate_product(args) - self.properties = translated["properties"] - self.geometry = translated["geometry"] + self.properties = translated['properties'] + self.geometry = translated['geometry'] self.baseline = None self.session = session @@ -159,9 +151,9 @@ def geojson(self) -> Dict: with `type`, `geometry`, and `properties` keys """ return { - "type": "Feature", - "geometry": self.geometry, - "properties": self.properties, + 'type': 'Feature', + 'geometry': self.geometry, + 'properties': self.properties, } def download( @@ -181,12 +173,12 @@ def download( :return: None """ - default_filename = self.properties["fileName"] + default_filename = self.properties['fileName'] if filename is not None: multiple_files = ( fileType == FileDownloadType.ADDITIONAL_FILES - and len(self.properties["additionalUrls"]) > 1 + and len(self.properties['additionalUrls']) > 1 ) or fileType == FileDownloadType.ALL_FILES if multiple_files: warnings.warn( @@ -207,11 +199,11 @@ def download( download_url( url=url, path=path, - filename=f"{base_filename}.{extension}", - session=session + filename=f'{base_filename}.{extension}', + session=session, ) - def get_urls(self, fileType = FileDownloadType.DEFAULT_FILE) -> list: + def get_urls(self, fileType=FileDownloadType.DEFAULT_FILE) -> list: urls = [] if fileType == FileDownloadType.DEFAULT_FILE: @@ -222,7 +214,9 @@ def get_urls(self, fileType = FileDownloadType.DEFAULT_FILE) -> list: urls.append(self.properties['url']) urls.extend(self.properties.get('additionalUrls', [])) else: - raise ValueError("Invalid FileDownloadType provided, the valid types are 'DEFAULT_FILE', 'ADDITIONAL_FILES', and 'ALL_FILES'") + raise ValueError( + "Invalid FileDownloadType provided, the valid types are 'DEFAULT_FILE', 'ADDITIONAL_FILES', and 'ALL_FILES'" + ) return urls def _get_additional_filenames_and_urls( @@ -231,7 +225,7 @@ def _get_additional_filenames_and_urls( ) -> List[Tuple[str, str]]: return [ (self._parse_filename_from_url(url), url) - for url in self.properties.get("additionalUrls", []) + for url in self.properties.get('additionalUrls', []) ] def _parse_filename_from_url(self, url: str) -> str: @@ -240,7 +234,7 @@ def _parse_filename_from_url(self, url: str) -> str: return filename def stack( - self, opts: ASFSearchOptions = None, useSubclass: Type["ASFProduct"] = None + self, opts: ASFSearchOptions = None, useSubclass: Type['ASFProduct'] = None ) -> ASFSearchResults: """ Builds a baseline stack from this product. 
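A minimal usage sketch of the download/get_urls flow reformatted above (not part of the patch; it assumes `search`, `PLATFORM`, and `FileDownloadType` are re-exported at the asf_search package level, as in released builds):

import asf_search as asf

# Fetch a single product, then exercise the FileDownloadType branches
# from the get_urls() hunk above. maxResults=1 keeps this cheap.
results = asf.search(platform=asf.PLATFORM.SENTINEL1A, maxResults=1)
product = results[0]

# DEFAULT_FILE yields just properties['url']; ALL_FILES also appends
# any additionalUrls the product carries.
print(product.get_urls(fileType=asf.FileDownloadType.DEFAULT_FILE))
print(product.get_urls(fileType=asf.FileDownloadType.ALL_FILES))

# download() warns and falls back to default filenames when a single
# `filename` is passed but multiple files would be written. Actual
# downloads typically require an authenticated ASFSession.
product.download(path='.')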
@@ -274,41 +268,39 @@ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: return None def _get_access_urls( - self, url_types: List[str] = ["GET DATA", "EXTENDED METADATA"] + self, url_types: List[str] = ['GET DATA', 'EXTENDED METADATA'] ) -> List[str]: accessUrls = [] for url_type in url_types: - if urls := self.umm_get( - self.umm, "RelatedUrls", ("Type", [(url_type, "URL")]), 0 - ): + if urls := self.umm_get(self.umm, 'RelatedUrls', ('Type', [(url_type, 'URL')]), 0): accessUrls.extend(urls) return sorted(list(set(accessUrls))) def _get_additional_urls(self) -> List[str]: - accessUrls = self._get_access_urls(["GET DATA", "EXTENDED METADATA"]) + accessUrls = self._get_access_urls(['GET DATA', 'EXTENDED METADATA']) return [ url for url in accessUrls - if not url.endswith(".md5") - and not url.startswith("s3://") - and "s3credentials" not in url - and not url.endswith(".png") - and url != self.properties["url"] + if not url.endswith('.md5') + and not url.startswith('s3://') + and 's3credentials' not in url + and not url.endswith('.png') + and url != self.properties['url'] ] def _get_s3_urls(self) -> List[str]: s3_urls = self._get_access_urls( - ["GET DATA", "EXTENDED METADATA", "GET DATA VIA DIRECT ACCESS"] + ['GET DATA', 'EXTENDED METADATA', 'GET DATA VIA DIRECT ACCESS'] ) - return [url for url in s3_urls if url.startswith("s3://")] + return [url for url in s3_urls if url.startswith('s3://')] def centroid(self) -> Point: """ Finds the centroid of a product """ - coords = mapping(shape(self.geometry))["coordinates"][0] + coords = mapping(shape(self.geometry))['coordinates'][0] lons = [p[0] for p in coords] if max(lons) - min(lons) > 180: unwrapped_coords = [a if a[0] > 0 else [a[0] + 360, a[1]] for a in coords] @@ -317,7 +309,7 @@ def centroid(self) -> Point: return Polygon(unwrapped_coords).centroid - def remotezip(self, session: ASFSession) -> "RemoteZip": # type: ignore # noqa: F821 + def remotezip(self, session: ASFSession) -> 'RemoteZip': # type: ignore # noqa: F821 """Returns a RemoteZip object which can be used to download a part of an ASFProduct's zip archive. 
(See example in examples/5-Download.ipynb) @@ -329,52 +321,52 @@ def remotezip(self, session: ASFSession) -> "RemoteZip": # type: ignore # noqa: """ from .download.download import remotezip - return remotezip(self.properties["url"], session=session) + return remotezip(self.properties['url'], session=session) def _read_umm_property(self, umm: Dict, mapping: Dict) -> Any: - value = self.umm_get(umm, *mapping["path"]) - if mapping.get("cast") is None: + value = self.umm_get(umm, *mapping['path']) + if mapping.get('cast') is None: return value - return self.umm_cast(mapping["cast"], value) + return self.umm_cast(mapping['cast'], value) def translate_product(self, item: Dict) -> Dict: """ Generates `properties` and `geometry` from the CMR UMM response """ try: - coordinates = item["umm"]["SpatialExtent"]["HorizontalSpatialDomain"][ - "Geometry" - ]["GPolygons"][0]["Boundary"]["Points"] - coordinates = [[c["Longitude"], c["Latitude"]] for c in coordinates] - geometry = {"coordinates": [coordinates], "type": "Polygon"} + coordinates = item['umm']['SpatialExtent']['HorizontalSpatialDomain']['Geometry'][ + 'GPolygons' + ][0]['Boundary']['Points'] + coordinates = [[c['Longitude'], c['Latitude']] for c in coordinates] + geometry = {'coordinates': [coordinates], 'type': 'Polygon'} except KeyError: - geometry = {"coordinates": None, "type": "Polygon"} + geometry = {'coordinates': None, 'type': 'Polygon'} - umm = item.get("umm") + umm = item.get('umm') # additionalAttributes = {attr['Name']: attr['Values'] for attr in umm['AdditionalAttributes']} properties = { - prop: self._read_umm_property(umm, umm_mapping) + prop: self._read_umm_property(umm, umm_mapping) for prop, umm_mapping in self._base_properties.items() } - if properties.get("url") is not None: - properties["fileName"] = properties["url"].split("/")[-1] + if properties.get('url') is not None: + properties['fileName'] = properties['url'].split('/')[-1] else: - properties["fileName"] = None + properties['fileName'] = None # Fallbacks - if properties.get("beamModeType") is None: - properties["beamModeType"] = self.umm_get( - umm, "AdditionalAttributes", ("Name", "BEAM_MODE"), "Values", 0 + if properties.get('beamModeType') is None: + properties['beamModeType'] = self.umm_get( + umm, 'AdditionalAttributes', ('Name', 'BEAM_MODE'), 'Values', 0 ) - if properties.get("platform") is None: - properties["platform"] = self.umm_get(umm, "Platforms", 0, "ShortName") + if properties.get('platform') is None: + properties['platform'] = self.umm_get(umm, 'Platforms', 0, 'ShortName') - return {"geometry": geometry, "properties": properties, "type": "Feature"} + return {'geometry': geometry, 'properties': properties, 'type': 'Feature'} def get_sort_keys(self) -> Tuple[str, str]: """ @@ -383,9 +375,9 @@ def get_sort_keys(self) -> Tuple[str, str]: """ # `sort()` will raise an error when comparing `NoneType`, # using self._read_property() to wrap standard `dict.get()` for possible `None` values - primary_key = self._read_property(key="stopTime", default="") + primary_key = self._read_property(key='stopTime', default='') secondary_key = self._read_property( - key="fileID", default=self._read_property("sceneName", "") + key='fileID', default=self._read_property('sceneName', '') ) return (primary_key, secondary_key) @@ -509,7 +501,7 @@ def umm_get(item: Dict, *args): return None if item is None: return None - if item in [None, "NA", "N/A", ""]: + if item in [None, 'NA', 'N/A', '']: item = None return item diff --git a/asf_search/ASFSearchOptions/__init__.py 
b/asf_search/ASFSearchOptions/__init__.py index a41f85ff..69831107 100644 --- a/asf_search/ASFSearchOptions/__init__.py +++ b/asf_search/ASFSearchOptions/__init__.py @@ -1,2 +1,2 @@ -from .ASFSearchOptions import ASFSearchOptions # noqa F401 +from .ASFSearchOptions import ASFSearchOptions # noqa F401 from .validators import * # noqa F401 F403 diff --git a/asf_search/ASFSearchOptions/config.py b/asf_search/ASFSearchOptions/config.py index 6b02e947..b401da10 100644 --- a/asf_search/ASFSearchOptions/config.py +++ b/asf_search/ASFSearchOptions/config.py @@ -5,5 +5,5 @@ 'host': INTERNAL.CMR_HOST, 'provider': INTERNAL.DEFAULT_PROVIDER, 'session': ASFSession(), - 'collectionAlias': True + 'collectionAlias': True, } diff --git a/asf_search/ASFSearchOptions/validator_map.py b/asf_search/ASFSearchOptions/validator_map.py index 7864eda2..ce72009e 100644 --- a/asf_search/ASFSearchOptions/validator_map.py +++ b/asf_search/ASFSearchOptions/validator_map.py @@ -30,58 +30,56 @@ def validate(key, value): try: return validator_map[key](value) except ValueError as exc: - ASF_LOGGER.exception( - f"Failed to parse item in ASFSearchOptions: {key=} {value=} {exc=}" - ) + ASF_LOGGER.exception(f'Failed to parse item in ASFSearchOptions: {key=} {value=} {exc=}') raise validator_map = { # Search parameters Parser - "maxResults": int, - "absoluteOrbit": parse_int_or_range_list, - "asfFrame": parse_int_or_range_list, - "beamMode": parse_string_list, - "beamSwath": parse_string_list, - "campaign": parse_string, - "circle": parse_circle, - "linestring": parse_linestring, - "point": parse_point, - "maxDoppler": parse_float, - "minDoppler": parse_float, - "maxFaradayRotation": parse_float, - "minFaradayRotation": parse_float, - "flightDirection": parse_string, - "flightLine": parse_string, - "frame": parse_int_or_range_list, - "granule_list": parse_string_list, - "product_list": parse_string_list, - "intersectsWith": parse_wkt, - "lookDirection": parse_string, - "offNadirAngle": parse_float_or_range_list, - "platform": parse_string_list, - "polarization": parse_string_list, - "processingLevel": parse_string_list, - "relativeOrbit": parse_int_or_range_list, - "processingDate": parse_date, - "start": parse_date, - "end": parse_date, - "season": parse_int_list, - "groupID": parse_string_list, - "insarStackId": parse_string, - "instrument": parse_string, - "collections": parse_string_list, - "shortName": parse_string_list, - "temporalBaselineDays": parse_string_list, - "operaBurstID": parse_string_list, - "absoluteBurstID": parse_int_list, - "relativeBurstID": parse_int_list, - "fullBurstID": parse_string_list, - "dataset": parse_string_list, - "cmr_keywords": parse_cmr_keywords_list, + 'maxResults': int, + 'absoluteOrbit': parse_int_or_range_list, + 'asfFrame': parse_int_or_range_list, + 'beamMode': parse_string_list, + 'beamSwath': parse_string_list, + 'campaign': parse_string, + 'circle': parse_circle, + 'linestring': parse_linestring, + 'point': parse_point, + 'maxDoppler': parse_float, + 'minDoppler': parse_float, + 'maxFaradayRotation': parse_float, + 'minFaradayRotation': parse_float, + 'flightDirection': parse_string, + 'flightLine': parse_string, + 'frame': parse_int_or_range_list, + 'granule_list': parse_string_list, + 'product_list': parse_string_list, + 'intersectsWith': parse_wkt, + 'lookDirection': parse_string, + 'offNadirAngle': parse_float_or_range_list, + 'platform': parse_string_list, + 'polarization': parse_string_list, + 'processingLevel': parse_string_list, + 'relativeOrbit': parse_int_or_range_list, + 
'processingDate': parse_date, + 'start': parse_date, + 'end': parse_date, + 'season': parse_int_list, + 'groupID': parse_string_list, + 'insarStackId': parse_string, + 'instrument': parse_string, + 'collections': parse_string_list, + 'shortName': parse_string_list, + 'temporalBaselineDays': parse_string_list, + 'operaBurstID': parse_string_list, + 'absoluteBurstID': parse_int_list, + 'relativeBurstID': parse_int_list, + 'fullBurstID': parse_string_list, + 'dataset': parse_string_list, + 'cmr_keywords': parse_cmr_keywords_list, # Config parameters Parser - "session": parse_session, - "host": parse_string, - "provider": parse_string, - "collectionAlias": bool, + 'session': parse_session, + 'host': parse_string, + 'provider': parse_string, + 'collectionAlias': bool, } diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py index 30d82dd7..c8d7bbcd 100644 --- a/asf_search/ASFSearchOptions/validators.py +++ b/asf_search/ASFSearchOptions/validators.py @@ -7,7 +7,7 @@ import math from shapely import wkt, errors -number = TypeVar("number", int, float) +number = TypeVar('number', int, float) def parse_string(value: str) -> str: @@ -18,13 +18,11 @@ def parse_string(value: str) -> str: """ # Convert to string first, so length is checked against only str types: try: - value = f"{value}" + value = f'{value}' except ValueError as exc: # If this happens, printing v's value would fail too... - raise ValueError( - f"Invalid string: Can't cast type '{type(value)}' to string." - ) from exc + raise ValueError(f"Invalid string: Can't cast type '{type(value)}' to string.") from exc if len(value) == 0: - raise ValueError("Invalid string: Empty.") + raise ValueError('Invalid string: Empty.') return value @@ -37,9 +35,9 @@ def parse_float(value: float) -> float: try: value = float(value) except ValueError as exc: - raise ValueError(f"Invalid float: {value}") from exc + raise ValueError(f'Invalid float: {value}') from exc if math.isinf(value) or math.isnan(value): - raise ValueError(f"Float values must be finite: got {value}") + raise ValueError(f'Float values must be finite: got {value}') return value @@ -57,7 +55,7 @@ def parse_date(value: Union[str, datetime]) -> Union[datetime, str]: if date is None: raise ValueError(f"Invalid date: '{value}'.") - return _to_utc(date).strftime("%Y-%m-%dT%H:%M:%SZ") + return _to_utc(date).strftime('%Y-%m-%dT%H:%M:%SZ') def _to_utc(date: datetime): @@ -84,26 +82,24 @@ def parse_range( """ if isinstance(value, tuple): if len(value) < 2: - raise ValueError(f"Not enough values in min/max tuple: {value}") + raise ValueError(f'Not enough values in min/max tuple: {value}') if len(value) > 2: - raise ValueError(f"Too many values in min/max tuple: {value}") + raise ValueError(f'Too many values in min/max tuple: {value}') value = (h(value[0]), h(value[1])) if math.isinf(value[0]) or math.isnan(value[0]): raise ValueError( - f"Expected finite numeric min in min/max tuple, got {value[0]}: {value}" + f'Expected finite numeric min in min/max tuple, got {value[0]}: {value}' ) if math.isinf(value[1]) or math.isnan(value[1]): raise ValueError( - f"Expected finite numeric max in min/max tuple, got {value[1]}: {value}" + f'Expected finite numeric max in min/max tuple, got {value[1]}: {value}' ) if value[0] > value[1]: raise ValueError( - f"Min must be less than max when using min/max tuples to search: {value}" + f'Min must be less than max when using min/max tuples to search: {value}' ) return value - raise ValueError( - f"Invalid range. 
Expected 2-value numeric tuple, got {type(value)}: {value}"
-    )
+    raise ValueError(f'Invalid range. Expected 2-value numeric tuple, got {type(value)}: {value}')


 # Parse and validate a date range: "1991-10-01T00:00:00Z,1991-10-02T00:00:00Z"
@@ -131,7 +127,7 @@ def parse_list(value: Sequence, h) -> List:
     try:
         return [h(a) for a in value]
     except ValueError as exc:
-        raise ValueError(f"Invalid {h.__name__} list: {exc}") from exc
+        raise ValueError(f'Invalid {h.__name__} list: {exc}') from exc


 def parse_cmr_keywords_list(value: Sequence[Union[Dict, Sequence]]):
@@ -143,19 +139,19 @@
     for idx, item in enumerate(value):
         if not isinstance(item, tuple) and not isinstance(item, Sequence):
             raise ValueError(
-                f"Expected item in cmr_keywords list index {idx} to be tuple pair, "
-                f"got value {item} of type {type(item)}"
+                f'Expected item in cmr_keywords list index {idx} to be tuple pair, '
+                f'got value {item} of type {type(item)}'
             )
         if len(item) != 2:
             raise ValueError(
-                f"Expected item in cmr_keywords list index {idx} to be of length 2, "
-                f"got value {item} of length {len(item)}"
+                f'Expected item in cmr_keywords list index {idx} to be of length 2, '
+                f'got value {item} of length {len(item)}'
             )

         search_key, search_value = item
         if not isinstance(search_key, str) or not isinstance(search_value, str):
             raise ValueError(
-                f"Expected tuple pair of types: "
+                f'Expected tuple pair of types: '
                 f'"{type(str)}, {type(str)}" in cmr_keywords at index {idx}, '
                 f'got value "{str(item)}" '
                 f'of types: "{type(search_key)}, {type(search_value)}"'
@@ -190,7 +186,7 @@ def parse_number_or_range(value: Union[List, Tuple[number, number], range], h):
         return h(value)

     except ValueError as exc:
-        raise ValueError(f"Invalid {h.__name__} or range: {exc}") from exc
+        raise ValueError(f'Invalid {h.__name__} or range: {exc}') from exc


 # Parse and validate an iterable of numbers or number ranges, using h() to validate each value:
@@ -215,18 +211,14 @@ def parse_float_or_range_list(value: Sequence) -> List:

 # Parse and validate a coordinate list
 def parse_coord_list(value: Sequence[float]) -> List[float]:
     if not isinstance(value, Sequence):
-        raise ValueError(
-            f"Invalid coord list: Must pass in an iterable. Got {type(value)}."
-        )
+        raise ValueError(f'Invalid coord list: Must pass in an iterable.
Got {type(value)}.') for coord in value: try: float(coord) except ValueError as exc: - raise ValueError(f"Invalid coordinate: {coord}") from exc + raise ValueError(f'Invalid coordinate: {coord}') from exc if len(value) % 2 != 0: - raise ValueError( - f"Invalid coordinate list, odd number of values provided: {value}" - ) + raise ValueError(f'Invalid coordinate list, odd number of values provided: {value}') return value @@ -236,9 +228,9 @@ def parse_bbox_list(value: Sequence[float]) -> List[float]: # This also makes sure v is an iterable: value = parse_coord_list(value) except ValueError as exc: - raise ValueError(f"Invalid bbox: {exc}") from exc + raise ValueError(f'Invalid bbox: {exc}') from exc if len(value) != 4: - raise ValueError(f"Invalid bbox, must be 4 values: {value}") + raise ValueError(f'Invalid bbox, must be 4 values: {value}') return value @@ -248,9 +240,9 @@ def parse_point_list(value: Sequence[float]) -> List[float]: # This also makes sure v is an iterable: value = parse_coord_list(value) except ValueError as exc: - raise ValueError(f"Invalid point: {exc}") from exc + raise ValueError(f'Invalid point: {exc}') from exc if len(value) != 2: - raise ValueError(f"Invalid point, must be 2 values: {value}") + raise ValueError(f'Invalid point, must be 2 values: {value}') return value @@ -259,7 +251,7 @@ def parse_wkt(value: str) -> str: try: value = wkt.loads(value) except errors.WKTReadingError as exc: - raise ValueError(f"Invalid wkt: {exc}") from exc + raise ValueError(f'Invalid wkt: {exc}') from exc return wkt.dumps(value) @@ -268,9 +260,7 @@ def parse_wkt(value: str) -> str: def parse_circle(value: List[float]) -> List[float]: value = parse_float_list(value) if len(value) != 3: - raise ValueError( - f"Invalid circle, must be 3 values (lat, long, radius). Got: {value}" - ) + raise ValueError(f'Invalid circle, must be 3 values (lat, long, radius). Got: {value}') return value @@ -280,7 +270,7 @@ def parse_linestring(value: List[float]) -> List[float]: value = parse_float_list(value) if len(value) % 2 != 0: raise ValueError( - f"Invalid linestring, must be values of format (lat, long, lat, long, ...). Got: {value}" + f'Invalid linestring, must be values of format (lat, long, lat, long, ...). Got: {value}' ) return value @@ -288,9 +278,7 @@ def parse_linestring(value: List[float]) -> List[float]: def parse_point(value: List[float]) -> List[float]: value = parse_float_list(value) if len(value) != 2: - raise ValueError( - f"Invalid point, must be values of format (lat, long). Got: {value}" - ) + raise ValueError(f'Invalid point, must be values of format (lat, long). Got: {value}') return value @@ -299,7 +287,7 @@ def parse_coord_string(value: List): value = parse_float_list(value) if len(value) % 2 != 0: raise ValueError( - f"Invalid coordinate string, must be values of format (lat, long, lat, long, ...). Got: {value}" + f'Invalid coordinate string, must be values of format (lat, long, lat, long, ...). Got: {value}' ) return value @@ -310,6 +298,6 @@ def parse_session(session: Type[requests.Session]): return session else: raise ValueError( - "Invalid Session: expected ASFSession or a requests.Session subclass. " - f"Got {type(session)}" + 'Invalid Session: expected ASFSession or a requests.Session subclass. 
' + f'Got {type(session)}' ) diff --git a/asf_search/ASFSearchResults.py b/asf_search/ASFSearchResults.py index c3b2fa01..ee3de2d2 100644 --- a/asf_search/ASFSearchResults.py +++ b/asf_search/ASFSearchResults.py @@ -23,8 +23,8 @@ def __init__(self, *args, opts: ASFSearchOptions = None): def geojson(self): return { - "type": "FeatureCollection", - "features": [product.geojson() for product in self], + 'type': 'FeatureCollection', + 'features': [product.geojson() for product in self], } def csv(self): @@ -66,25 +66,25 @@ def download( Number of download processes to use. Defaults to 1 (i.e. sequential download) """ - ASF_LOGGER.info(f"Started downloading ASFSearchResults of size {len(self)}.") + ASF_LOGGER.info(f'Started downloading ASFSearchResults of size {len(self)}.') if processes == 1: for product in self: product.download(path=path, session=session, fileType=fileType) else: - ASF_LOGGER.info(f"Using {processes} threads - starting up pool.") + ASF_LOGGER.info(f'Using {processes} threads - starting up pool.') pool = Pool(processes=processes) args = [(product, path, session, fileType) for product in self] pool.map(_download_product, args) pool.close() pool.join() - ASF_LOGGER.info(f"Finished downloading ASFSearchResults of size {len(self)}.") + ASF_LOGGER.info(f'Finished downloading ASFSearchResults of size {len(self)}.') def raise_if_incomplete(self) -> None: if not self.searchComplete: msg = ( 'Results are incomplete due to a search error. ' 'See logging for more details. (ASFSearchResults.raise_if_incomplete called)' - ) + ) ASF_LOGGER.error(msg) raise ASFSearchError(msg) diff --git a/asf_search/ASFSession.py b/asf_search/ASFSession.py index b9444374..ca6c13f5 100644 --- a/asf_search/ASFSession.py +++ b/asf_search/ASFSession.py @@ -61,32 +61,26 @@ def __init__( https://urs.earthdata.nasa.gov/documentation/faq """ super().__init__() - user_agent = "; ".join( + user_agent = '; '.join( [ - f"Python/{platform.python_version()}", - f"{requests.__name__}/{requests.__version__}", - f"{asf_name}/{asf_version}", + f'Python/{platform.python_version()}', + f'{requests.__name__}/{requests.__version__}', + f'{asf_name}/{asf_version}', ] ) - self.headers.update({"User-Agent": user_agent}) # For all hosts - self.headers.update({"Client-Id": f"{asf_name}_v{asf_version}"}) # For CMR + self.headers.update({'User-Agent': user_agent}) # For all hosts + self.headers.update({'Client-Id': f'{asf_name}_v{asf_version}'}) # For CMR from asf_search.constants import INTERNAL self.edl_host = INTERNAL.EDL_HOST if edl_host is None else edl_host - self.edl_client_id = ( - INTERNAL.EDL_CLIENT_ID if edl_client_id is None else edl_client_id - ) - self.asf_auth_host = ( - INTERNAL.ASF_AUTH_HOST if asf_auth_host is None else asf_auth_host - ) + self.edl_client_id = INTERNAL.EDL_CLIENT_ID if edl_client_id is None else edl_client_id + self.asf_auth_host = INTERNAL.ASF_AUTH_HOST if asf_auth_host is None else asf_auth_host self.cmr_collections = ( INTERNAL.CMR_COLLECTIONS if cmr_collections is None else cmr_collections ) - self.auth_domains = ( - INTERNAL.AUTH_DOMAINS if auth_domains is None else auth_domains - ) + self.auth_domains = INTERNAL.AUTH_DOMAINS if auth_domains is None else auth_domains self.auth_cookie_names = ( INTERNAL.AUTH_COOKIES if auth_cookie_names is None else auth_cookie_names ) @@ -131,7 +125,7 @@ def auth_with_creds(self, username: str, password: str): ---------- ASFSession """ - login_url = 
f"https://{self.edl_host}/oauth/authorize?client_id={self.edl_client_id}&response_type=code&redirect_uri=https://{self.asf_auth_host}/login" # noqa F401 + login_url = f'https://{self.edl_host}/oauth/authorize?client_id={self.edl_client_id}&response_type=code&redirect_uri=https://{self.asf_auth_host}/login' # noqa F401 self.auth = (username, password) @@ -139,11 +133,11 @@ def auth_with_creds(self, username: str, password: str): self.get(login_url) if not self._check_auth_cookies(self.cookies.get_dict()): - raise ASFAuthenticationError("Username or password is incorrect") + raise ASFAuthenticationError('Username or password is incorrect') ASF_LOGGER.info('Login successful') - token = self.cookies.get_dict().get("urs-access-token") + token = self.cookies.get_dict().get('urs-access-token') if token is None: ASF_LOGGER.warning( @@ -176,15 +170,15 @@ def auth_with_token(self, token: str): ASFSession """ oauth_authorization = ( - f"https://{self.edl_host}/oauth/tokens/user?client_id={self.edl_client_id}" + f'https://{self.edl_host}/oauth/tokens/user?client_id={self.edl_client_id}' ) - ASF_LOGGER.info(f"Authenticating EDL token against {oauth_authorization}") - response = self.post(url=oauth_authorization, data={"token": token}) + ASF_LOGGER.info(f'Authenticating EDL token against {oauth_authorization}') + response = self.post(url=oauth_authorization, data={'token': token}) if not 200 <= response.status_code <= 299: if not self._try_legacy_token_auth(token=token): - raise ASFAuthenticationError("Invalid/Expired token passed") + raise ASFAuthenticationError('Invalid/Expired token passed') ASF_LOGGER.info('EDL token authentication successful') self._update_edl_token(token=token) @@ -201,16 +195,16 @@ def _try_legacy_token_auth(self, token: str) -> False: from asf_search.constants import INTERNAL if self.cmr_host != INTERNAL.CMR_HOST: - self.headers.update({"Authorization": "Bearer {0}".format(token)}) - legacy_auth_url = f"https://{self.cmr_host}{self.cmr_collections}" + self.headers.update({'Authorization': 'Bearer {0}'.format(token)}) + legacy_auth_url = f'https://{self.cmr_host}{self.cmr_collections}' response = self.get(legacy_auth_url) - self.headers.pop("Authorization") + self.headers.pop('Authorization') return 200 <= response.status_code <= 299 return False def _update_edl_token(self, token: str): - self.headers.update({"Authorization": "Bearer {0}".format(token)}) + self.headers.update({'Authorization': 'Bearer {0}'.format(token)}) def auth_with_cookiejar( self, @@ -224,13 +218,13 @@ def auth_with_cookiejar( :return ASFSession: returns self for convenience """ if not self._check_auth_cookies(cookies): - raise ASFAuthenticationError("Cookiejar does not contain login cookies") + raise ASFAuthenticationError('Cookiejar does not contain login cookies') for cookie in cookies: if cookie.is_expired(): - raise ASFAuthenticationError("Cookiejar contains expired cookies") + raise ASFAuthenticationError('Cookiejar contains expired cookies') - token = cookies.get_dict().get("urs-access-token") + token = cookies.get_dict().get('urs-access-token') if token is None: ASF_LOGGER.warning( 'Failed to find EDL Token in cookiejar. ' @@ -238,9 +232,7 @@ def auth_with_cookiejar( 'required for hidden/restricted dataset access.' 
) else: - ASF_LOGGER.info( - 'Authenticating EDL token found in "urs-access-token" cookie' - ) + ASF_LOGGER.info('Authenticating EDL token found in "urs-access-token" cookie') try: self.auth_with_token(token) except ASFAuthenticationError: @@ -262,9 +254,7 @@ def _check_auth_cookies( return any(cookie in self.auth_cookie_names for cookie in cookies) - def rebuild_auth( - self, prepared_request: requests.Request, response: requests.Response - ): + def rebuild_auth(self, prepared_request: requests.Request, response: requests.Response): """ Overrides requests.Session.rebuild_auth() default behavior of stripping the Authorization header @@ -274,17 +264,14 @@ def rebuild_auth( headers = prepared_request.headers url = prepared_request.url - if "Authorization" in headers: - original_domain = ".".join( - self._get_domain(response.request.url).split(".")[-3:] - ) - redirect_domain = ".".join(self._get_domain(url).split(".")[-3:]) + if 'Authorization' in headers: + original_domain = '.'.join(self._get_domain(response.request.url).split('.')[-3:]) + redirect_domain = '.'.join(self._get_domain(url).split('.')[-3:]) if original_domain != redirect_domain and ( - original_domain not in self.auth_domains - or redirect_domain not in self.auth_domains + original_domain not in self.auth_domains or redirect_domain not in self.auth_domains ): - del headers["Authorization"] + del headers['Authorization'] new_auth = get_netrc_auth(url) if self.trust_env else None if new_auth is not None: @@ -299,12 +286,12 @@ def __getstate__(self): state = super().__getstate__() state = { **state, - "edl_host": self.edl_host, - "edl_client_id": self.edl_client_id, - "asf_auth_host": self.asf_auth_host, - "cmr_host": self.cmr_host, - "cmr_collections": self.cmr_collections, - "auth_domains": self.auth_domains, - "auth_cookie_names": self.auth_cookie_names, + 'edl_host': self.edl_host, + 'edl_client_id': self.edl_client_id, + 'asf_auth_host': self.asf_auth_host, + 'cmr_host': self.cmr_host, + 'cmr_collections': self.cmr_collections, + 'auth_domains': self.auth_domains, + 'auth_cookie_names': self.auth_cookie_names, } return state diff --git a/asf_search/ASFStackableProduct.py b/asf_search/ASFStackableProduct.py index 92fd3b5b..4fc7378e 100644 --- a/asf_search/ASFStackableProduct.py +++ b/asf_search/ASFStackableProduct.py @@ -36,9 +36,9 @@ def get_baseline_calc_properties(self) -> Dict: float, self.umm_get( self.umm, - "AdditionalAttributes", - ("Name", "INSAR_BASELINE"), - "Values", + 'AdditionalAttributes', + ('Name', 'INSAR_BASELINE'), + 'Values', 0, ), ) @@ -46,26 +46,26 @@ def get_baseline_calc_properties(self) -> Dict: if insarBaseline is None: return None - return {"insarBaseline": insarBaseline} + return {'insarBaseline': insarBaseline} def get_stack_opts(self, opts: ASFSearchOptions = None): stack_opts = ASFSearchOptions() if opts is None else copy(opts) stack_opts.processingLevel = self.get_default_baseline_product_type() - if self.properties.get("insarStackId") in [None, "NA", 0, "0"]: + if self.properties.get('insarStackId') in [None, 'NA', 0, '0']: raise ASFBaselineError( 'Requested reference product needs a baseline stack ID ' f'but does not have one: {self.properties["fileID"]}' ) - stack_opts.insarStackId = self.properties["insarStackId"] + stack_opts.insarStackId = self.properties['insarStackId'] return stack_opts def is_valid_reference(self): # we don't stack at all if any of stack is missing insarBaseline, # unlike stacking S1 products(?) 
- if "insarBaseline" not in self.baseline: - raise ValueError("No baseline values available for precalculated dataset") + if 'insarBaseline' not in self.baseline: + raise ValueError('No baseline values available for precalculated dataset') return True diff --git a/asf_search/CMR/MissionList.py b/asf_search/CMR/MissionList.py index 27c7b1f2..b7060605 100644 --- a/asf_search/CMR/MissionList.py +++ b/asf_search/CMR/MissionList.py @@ -14,13 +14,13 @@ def get_campaigns(data) -> Dict: :return: Dictionary containing CMR umm_json response """ - response = requests.post(f"https://{CMR_HOST}{CMR_COLLECTIONS_PATH}", data=data) + response = requests.post(f'https://{CMR_HOST}{CMR_COLLECTIONS_PATH}', data=data) if response.status_code != 200: - raise CMRError(f"CMR_ERROR {response.status_code}: {response.text}") + raise CMRError(f'CMR_ERROR {response.status_code}: {response.text}') try: data = response.json() except Exception as e: - raise CMRError(f"CMR_ERROR: Error parsing JSON from CMR: {e}") + raise CMRError(f'CMR_ERROR: Error parsing JSON from CMR: {e}') return data diff --git a/asf_search/CMR/datasets.py b/asf_search/CMR/datasets.py index 8c825ac6..c517bf7a 100644 --- a/asf_search/CMR/datasets.py +++ b/asf_search/CMR/datasets.py @@ -2,1402 +2,1402 @@ dataset_collections = { - "NISAR": { - "NISAR_NEN_RRST_BETA_V1": [ - "C1261815181-ASFDEV", - "C1261815288-ASF", - "C2850220296-ASF", - ], - "NISAR_NEN_RRST_PROVISIONAL_V1": [ - "C1261832381-ASFDEV", - "C1261832657-ASF", - "C2853068083-ASF", - ], - "NISAR_NEN_RRST_V1": [ - "C1256533420-ASFDEV", - "C1257349121-ASF", - "C2727902012-ASF", - ], - "NISAR_L0A_RRST_BETA_V1": [ - "C1261813453-ASFDEV", - "C1261815147-ASF", - "C2850223384-ASF", - ], - "NISAR_L0A_RRST_PROVISIONAL_V1": [ - "C1261832466-ASFDEV", - "C1261832658-ASF", - "C2853086824-ASF", - ], - "NISAR_L0A_RRST_V1": [ - "C1256524081-ASFDEV", - "C1257349120-ASF", - "C2727901263-ASF", - ], - "NISAR_L0B_RRSD_BETA_V1": [ - "C1261815274-ASFDEV", - "C1261815289-ASF", - "C2850224301-ASF", - ], - "NISAR_L0B_RRSD_PROVISIONAL_V1": [ - "C1261832497-ASFDEV", - "C1261832659-ASF", - "C2853089814-ASF", - ], - "NISAR_L0B_RRSD_V1": [ - "C1256358262-ASFDEV", - "C1257349115-ASF", - "C2727901639-ASF", - ], - "NISAR_L0B_CRSD_BETA_V1": [ - "C1261815276-ASFDEV", - "C1261815301-ASF", - "C2850225137-ASF", - ], - "NISAR_L0B_CRSD_PROVISIONAL_V1": [ - "C1261832632-ASFDEV", - "C1261832671-ASF", - "C2853091612-ASF", - ], - "NISAR_L0B_CRSD_V1": [ - "C1256358463-ASFDEV", - "C1257349114-ASF", - "C2727901523-ASF", - ], - "NISAR_L1_RSLC_BETA_V1": [ - "C1261813489-ASFDEV", - "C1261815148-ASF", - "C2850225585-ASF", - ], - "NISAR_L1_RSLC_PROVISIONAL_V1": [ - "C1261832868-ASFDEV", - "C1261833052-ASF", - "C2853145197-ASF", - ], - "NISAR_L1_RSLC_V1": [ - "C1256363301-ASFDEV", - "C1257349109-ASF", - "C2727900439-ASF", - ], - "NISAR_L1_RIFG_BETA_V1": [ - "C1261819086-ASFDEV", - "C1261819120-ASF", - "C2850234202-ASF", - ], - "NISAR_L1_RIFG_PROVISIONAL_V1": [ - "C1261832940-ASFDEV", - "C1261833063-ASF", - "C2853147928-ASF", - ], - "NISAR_L1_RIFG_V1": [ - "C1256381769-ASFDEV", - "C1257349108-ASF", - "C2723110181-ASF", - ], - "NISAR_L1_RUNW_BETA_V1": [ - "C1261819098-ASFDEV", - "C1261819121-ASF", - "C2850235455-ASF", - ], - "NISAR_L1_RUNW_PROVISIONAL_V1": [ - "C1261832990-ASFDEV", - "C1261833064-ASF", - "C2853153429-ASF", - ], - "NISAR_L1_RUNW_V1": [ - "C1256420738-ASFDEV", - "C1257349107-ASF", - "C2727900827-ASF", - ], - "NISAR_L1_ROFF_BETA_V1": [ - "C1261819110-ASFDEV", - "C1261819145-ASF", - "C2850237619-ASF", - ], - 
"NISAR_L1_ROFF_PROVISIONAL_V1": [ - "C1261832993-ASFDEV", - "C1261833076-ASF", - "C2853156054-ASF", - ], - "NISAR_L1_ROFF_V1": [ - "C1256411631-ASFDEV", - "C1257349103-ASF", - "C2727900080-ASF", - ], - "NISAR_L2_GSLC_BETA_V1": [ - "C1261819167-ASFDEV", - "C1261819258-ASF", - "C2850259510-ASF", - ], - "NISAR_L2_GSLC_PROVISIONAL_V1": [ - "C1261833024-ASFDEV", - "C1261833127-ASF", - "C2854332392-ASF", - ], - "NISAR_L2_GSLC_V1": [ - "C1256413628-ASFDEV", - "C1257349102-ASF", - "C2727896667-ASF", - ], - "NISAR_L2_GUNW_BETA_V1": [ - "C1261819168-ASFDEV", - "C1261819270-ASF", - "C2850261892-ASF", - ], - "NISAR_L2_GUNW_PROVISIONAL_V1": [ - "C1261833025-ASFDEV", - "C1261846741-ASF", - "C2854335566-ASF", - ], - "NISAR_L2_GUNW_V1": [ - "C1256432264-ASFDEV", - "C1257349096-ASF", - "C2727897718-ASF", - ], - "NISAR_L2_GCOV_BETA_V1": [ - "C1261819211-ASFDEV", - "C1261819275-ASF", - "C2850262927-ASF", - ], - "NISAR_L2_GCOV_PROVISIONAL_V1": [ - "C1261833026-ASFDEV", - "C1261846880-ASF", - "C2854338529-ASF", - ], - "NISAR_L2_GCOV_V1": [ - "C1256477304-ASFDEV", - "C1257349095-ASF", - "C2727896018-ASF", - ], - "NISAR_L2_GOFF_BETA_V1": [ - "C1261819233-ASFDEV", - "C1261819281-ASF", - "C2850263910-ASF", - ], - "NISAR_L2_GOFF_PROVISIONAL_V1": [ - "C1261833027-ASFDEV", - "C1261846994-ASF", - "C2854341702-ASF", - ], - "NISAR_L2_GOFF_V1": [ - "C1256479237-ASFDEV", - "C1257349094-ASF", - "C2727896460-ASF", - ], - "NISAR_L3_SME2_BETA_V1": [ - "C1261819245-ASFDEV", - "C1261819282-ASF", - "C2850265000-ASF", - ], - "NISAR_L3_SME2_PROVISIONAL_V1": [ - "C1261833050-ASFDEV", - "C1261847095-ASF", - "C2854344945-ASF", - ], - "NISAR_L3_SME2_V1": [ - "C1256568692-ASFDEV", - "C1257349093-ASF", - "C2727894546-ASF", - ], - "NISAR_CUSTOM_PROVISIONAL_V1": [ - "C1262134528-ASFDEV", - "C1262135006-ASF", - "C2874824964-ASF", + 'NISAR': { + 'NISAR_NEN_RRST_BETA_V1': [ + 'C1261815181-ASFDEV', + 'C1261815288-ASF', + 'C2850220296-ASF', + ], + 'NISAR_NEN_RRST_PROVISIONAL_V1': [ + 'C1261832381-ASFDEV', + 'C1261832657-ASF', + 'C2853068083-ASF', + ], + 'NISAR_NEN_RRST_V1': [ + 'C1256533420-ASFDEV', + 'C1257349121-ASF', + 'C2727902012-ASF', + ], + 'NISAR_L0A_RRST_BETA_V1': [ + 'C1261813453-ASFDEV', + 'C1261815147-ASF', + 'C2850223384-ASF', + ], + 'NISAR_L0A_RRST_PROVISIONAL_V1': [ + 'C1261832466-ASFDEV', + 'C1261832658-ASF', + 'C2853086824-ASF', + ], + 'NISAR_L0A_RRST_V1': [ + 'C1256524081-ASFDEV', + 'C1257349120-ASF', + 'C2727901263-ASF', + ], + 'NISAR_L0B_RRSD_BETA_V1': [ + 'C1261815274-ASFDEV', + 'C1261815289-ASF', + 'C2850224301-ASF', + ], + 'NISAR_L0B_RRSD_PROVISIONAL_V1': [ + 'C1261832497-ASFDEV', + 'C1261832659-ASF', + 'C2853089814-ASF', + ], + 'NISAR_L0B_RRSD_V1': [ + 'C1256358262-ASFDEV', + 'C1257349115-ASF', + 'C2727901639-ASF', + ], + 'NISAR_L0B_CRSD_BETA_V1': [ + 'C1261815276-ASFDEV', + 'C1261815301-ASF', + 'C2850225137-ASF', + ], + 'NISAR_L0B_CRSD_PROVISIONAL_V1': [ + 'C1261832632-ASFDEV', + 'C1261832671-ASF', + 'C2853091612-ASF', + ], + 'NISAR_L0B_CRSD_V1': [ + 'C1256358463-ASFDEV', + 'C1257349114-ASF', + 'C2727901523-ASF', + ], + 'NISAR_L1_RSLC_BETA_V1': [ + 'C1261813489-ASFDEV', + 'C1261815148-ASF', + 'C2850225585-ASF', + ], + 'NISAR_L1_RSLC_PROVISIONAL_V1': [ + 'C1261832868-ASFDEV', + 'C1261833052-ASF', + 'C2853145197-ASF', + ], + 'NISAR_L1_RSLC_V1': [ + 'C1256363301-ASFDEV', + 'C1257349109-ASF', + 'C2727900439-ASF', + ], + 'NISAR_L1_RIFG_BETA_V1': [ + 'C1261819086-ASFDEV', + 'C1261819120-ASF', + 'C2850234202-ASF', + ], + 'NISAR_L1_RIFG_PROVISIONAL_V1': [ + 'C1261832940-ASFDEV', + 'C1261833063-ASF', + 'C2853147928-ASF', + ], + 
'NISAR_L1_RIFG_V1': [ + 'C1256381769-ASFDEV', + 'C1257349108-ASF', + 'C2723110181-ASF', + ], + 'NISAR_L1_RUNW_BETA_V1': [ + 'C1261819098-ASFDEV', + 'C1261819121-ASF', + 'C2850235455-ASF', + ], + 'NISAR_L1_RUNW_PROVISIONAL_V1': [ + 'C1261832990-ASFDEV', + 'C1261833064-ASF', + 'C2853153429-ASF', + ], + 'NISAR_L1_RUNW_V1': [ + 'C1256420738-ASFDEV', + 'C1257349107-ASF', + 'C2727900827-ASF', + ], + 'NISAR_L1_ROFF_BETA_V1': [ + 'C1261819110-ASFDEV', + 'C1261819145-ASF', + 'C2850237619-ASF', + ], + 'NISAR_L1_ROFF_PROVISIONAL_V1': [ + 'C1261832993-ASFDEV', + 'C1261833076-ASF', + 'C2853156054-ASF', + ], + 'NISAR_L1_ROFF_V1': [ + 'C1256411631-ASFDEV', + 'C1257349103-ASF', + 'C2727900080-ASF', + ], + 'NISAR_L2_GSLC_BETA_V1': [ + 'C1261819167-ASFDEV', + 'C1261819258-ASF', + 'C2850259510-ASF', + ], + 'NISAR_L2_GSLC_PROVISIONAL_V1': [ + 'C1261833024-ASFDEV', + 'C1261833127-ASF', + 'C2854332392-ASF', + ], + 'NISAR_L2_GSLC_V1': [ + 'C1256413628-ASFDEV', + 'C1257349102-ASF', + 'C2727896667-ASF', + ], + 'NISAR_L2_GUNW_BETA_V1': [ + 'C1261819168-ASFDEV', + 'C1261819270-ASF', + 'C2850261892-ASF', + ], + 'NISAR_L2_GUNW_PROVISIONAL_V1': [ + 'C1261833025-ASFDEV', + 'C1261846741-ASF', + 'C2854335566-ASF', + ], + 'NISAR_L2_GUNW_V1': [ + 'C1256432264-ASFDEV', + 'C1257349096-ASF', + 'C2727897718-ASF', + ], + 'NISAR_L2_GCOV_BETA_V1': [ + 'C1261819211-ASFDEV', + 'C1261819275-ASF', + 'C2850262927-ASF', + ], + 'NISAR_L2_GCOV_PROVISIONAL_V1': [ + 'C1261833026-ASFDEV', + 'C1261846880-ASF', + 'C2854338529-ASF', + ], + 'NISAR_L2_GCOV_V1': [ + 'C1256477304-ASFDEV', + 'C1257349095-ASF', + 'C2727896018-ASF', + ], + 'NISAR_L2_GOFF_BETA_V1': [ + 'C1261819233-ASFDEV', + 'C1261819281-ASF', + 'C2850263910-ASF', + ], + 'NISAR_L2_GOFF_PROVISIONAL_V1': [ + 'C1261833027-ASFDEV', + 'C1261846994-ASF', + 'C2854341702-ASF', + ], + 'NISAR_L2_GOFF_V1': [ + 'C1256479237-ASFDEV', + 'C1257349094-ASF', + 'C2727896460-ASF', + ], + 'NISAR_L3_SME2_BETA_V1': [ + 'C1261819245-ASFDEV', + 'C1261819282-ASF', + 'C2850265000-ASF', + ], + 'NISAR_L3_SME2_PROVISIONAL_V1': [ + 'C1261833050-ASFDEV', + 'C1261847095-ASF', + 'C2854344945-ASF', + ], + 'NISAR_L3_SME2_V1': [ + 'C1256568692-ASFDEV', + 'C1257349093-ASF', + 'C2727894546-ASF', + ], + 'NISAR_CUSTOM_PROVISIONAL_V1': [ + 'C1262134528-ASFDEV', + 'C1262135006-ASF', + 'C2874824964-ASF', ], }, - "SENTINEL-1": { - "SENTINEL-1A_SLC": ["C1214470488-ASF", "C1205428742-ASF", "C1234413245-ASFDEV"], - "SENTINEL-1B_SLC": ["C1327985661-ASF", "C1216244348-ASF", "C1234413263-ASFDEV"], - "SENTINEL-1A_DP_GRD_HIGH": [ - "C1214470533-ASF", - "C1212201032-ASF", - "C1234413229-ASFDEV", - ], - "SENTINEL-1A_DP_META_GRD_HIGH": [ - "C1214470576-ASF", - "C1212209226-ASF", - "C1234413232-ASFDEV", - ], - "SENTINEL-1B_DP_GRD_HIGH": [ - "C1327985645-ASF", - "C1216244589-ASF", - "C1234413247-ASFDEV", - ], - "SENTINEL-1A_META_SLC": [ - "C1214470496-ASF", - "C1208117434-ASF", - "C1234413236-ASFDEV", - ], - "SENTINEL-1A_META_RAW": [ - "C1214470532-ASF", - "C1208115009-ASF", - "C1234413235-ASFDEV", - ], - "SENTINEL-1A_OCN": ["C1214472977-ASF", "C1212212560-ASF", "C1234413237-ASFDEV"], - "SENTINEL-1A_DP_META_GRD_MEDIUM": [ - "C1214472336-ASF", - "C1212212493-ASF", - "C1234413233-ASFDEV", - ], - "SENTINEL-1A_META_OCN": [ - "C1266376001-ASF", - "C1215704763-ASF", - "C1234413234-ASFDEV", - ], - "SENTINEL-1A_SP_META_GRD_HIGH": [ - "C1214470732-ASF", - "C1212158326-ASF", - "C1234413243-ASFDEV", - ], - "SENTINEL-1B_DP_GRD_MEDIUM": [ - "C1327985660-ASF", - "C1216244594-ASF", - "C1234413248-ASFDEV", - ], - "SENTINEL-1B_DP_META_GRD_HIGH": [ - 
"C1327985741-ASF", - "C1216244601-ASF", - "C1234413250-ASFDEV", - ], - "SENTINEL-1B_DP_META_GRD_MEDIUM": [ - "C1327985578-ASF", - "C1216244591-ASF", - "C1234413251-ASFDEV", - ], - "SENTINEL-1B_META_RAW": [ - "C1327985650-ASF", - "C1216244595-ASF", - "C1234413253-ASFDEV", - ], - "SENTINEL-1B_META_SLC": [ - "C1327985617-ASF", - "C1216244585-ASF", - "C1234413254-ASFDEV", - ], - "SENTINEL-1B_OCN": ["C1327985579-ASF", "C1216244593-ASF", "C1234413255-ASFDEV"], - "SENTINEL-1B_SP_META_GRD_HIGH": [ - "C1327985619-ASF", - "C1216244587-ASF", - "C1234413261-ASFDEV", - ], - "SENTINEL-1A_SP_GRD_MEDIUM": [ - "C1214472994-ASF", - "C1212158318-ASF", - "C1234413241-ASFDEV", - ], - "SENTINEL-1A_SP_META_GRD_MEDIUM": [ - "C1214473170-ASF", - "C1212233976-ASF", - "C1234413244-ASFDEV", - ], - "SENTINEL-1B_META_OCN": [ - "C1327985646-ASF", - "C1216244590-ASF", - "C1234413252-ASFDEV", - ], - "SENTINEL-1B_SP_GRD_MEDIUM": [ - "C1327985740-ASF", - "C1216244600-ASF", - "C1234413259-ASFDEV", - ], - "SENTINEL-1B_SP_META_GRD_MEDIUM": [ - "C1327985739-ASF", - "C1216244598-ASF", - "C1234413262-ASFDEV", - ], - "SENTINEL-1A_RAW": ["C1214470561-ASF", "C1205264459-ASF", "C1234413238-ASFDEV"], - "SENTINEL-1A_DP_GRD_MEDIUM": [ - "C1214471521-ASF", - "C1212209035-ASF", - "C1234413230-ASFDEV", - ], - "SENTINEL-1A_SP_GRD_HIGH": [ - "C1214470682-ASF", - "C1212158327-ASF", - "C1234413240-ASFDEV", - ], - "SENTINEL-1B_RAW": ["C1327985647-ASF", "C1216244592-ASF", "C1234413256-ASFDEV"], - "SENTINEL-1A_DP_GRD_FULL": [ - "C1214471197-ASF", - "C1212200781-ASF", - "C1234413228-ASFDEV", - ], - "SENTINEL-1A_DP_META_GRD_FULL": [ - "C1214471960-ASF", - "C1212209075-ASF", - "C1234413231-ASFDEV", - ], - "SENTINEL-1A_SP_GRD_FULL": ["C1214472978-ASF", "C1234413239-ASFDEV"], - "SENTINEL-1A_SP_META_GRD_FULL": ["C1214473165-ASF", "C1234413242-ASFDEV"], - "SENTINEL-1B_DP_GRD_FULL": [ - "C1327985697-ASF", - "C1216244597-ASF", - "C1234413246-ASFDEV", - ], - "SENTINEL-1B_DP_META_GRD_FULL": [ - "C1327985651-ASF", - "C1216244596-ASF", - "C1234413249-ASFDEV", - ], - "SENTINEL-1B_SP_GRD_FULL": [ - "C1327985644-ASF", - "C1216244588-ASF", - "C1234413257-ASFDEV", - ], - "SENTINEL-1B_SP_GRD_HIGH": [ - "C1327985571-ASF", - "C1216244586-ASF", - "C1234413258-ASFDEV", - ], - "SENTINEL-1B_SP_META_GRD_FULL": [ - "C1327985674-ASF", - "C1216244599-ASF", - "C1234413260-ASFDEV", - ], - "S1_Bursts": ["C1244552887-ASFDEV"], - "SENTINEL-1_BURSTS_DEV10": ["C1257175154-ASFDEV"], - "Sentinel-1_Burst_Map": ["C1244598379-ASFDEV"], - "Various Browse Images": ["C1240784657-ASFDEV"], + 'SENTINEL-1': { + 'SENTINEL-1A_SLC': ['C1214470488-ASF', 'C1205428742-ASF', 'C1234413245-ASFDEV'], + 'SENTINEL-1B_SLC': ['C1327985661-ASF', 'C1216244348-ASF', 'C1234413263-ASFDEV'], + 'SENTINEL-1A_DP_GRD_HIGH': [ + 'C1214470533-ASF', + 'C1212201032-ASF', + 'C1234413229-ASFDEV', + ], + 'SENTINEL-1A_DP_META_GRD_HIGH': [ + 'C1214470576-ASF', + 'C1212209226-ASF', + 'C1234413232-ASFDEV', + ], + 'SENTINEL-1B_DP_GRD_HIGH': [ + 'C1327985645-ASF', + 'C1216244589-ASF', + 'C1234413247-ASFDEV', + ], + 'SENTINEL-1A_META_SLC': [ + 'C1214470496-ASF', + 'C1208117434-ASF', + 'C1234413236-ASFDEV', + ], + 'SENTINEL-1A_META_RAW': [ + 'C1214470532-ASF', + 'C1208115009-ASF', + 'C1234413235-ASFDEV', + ], + 'SENTINEL-1A_OCN': ['C1214472977-ASF', 'C1212212560-ASF', 'C1234413237-ASFDEV'], + 'SENTINEL-1A_DP_META_GRD_MEDIUM': [ + 'C1214472336-ASF', + 'C1212212493-ASF', + 'C1234413233-ASFDEV', + ], + 'SENTINEL-1A_META_OCN': [ + 'C1266376001-ASF', + 'C1215704763-ASF', + 'C1234413234-ASFDEV', + ], + 'SENTINEL-1A_SP_META_GRD_HIGH': [ 
+ 'C1214470732-ASF', + 'C1212158326-ASF', + 'C1234413243-ASFDEV', + ], + 'SENTINEL-1B_DP_GRD_MEDIUM': [ + 'C1327985660-ASF', + 'C1216244594-ASF', + 'C1234413248-ASFDEV', + ], + 'SENTINEL-1B_DP_META_GRD_HIGH': [ + 'C1327985741-ASF', + 'C1216244601-ASF', + 'C1234413250-ASFDEV', + ], + 'SENTINEL-1B_DP_META_GRD_MEDIUM': [ + 'C1327985578-ASF', + 'C1216244591-ASF', + 'C1234413251-ASFDEV', + ], + 'SENTINEL-1B_META_RAW': [ + 'C1327985650-ASF', + 'C1216244595-ASF', + 'C1234413253-ASFDEV', + ], + 'SENTINEL-1B_META_SLC': [ + 'C1327985617-ASF', + 'C1216244585-ASF', + 'C1234413254-ASFDEV', + ], + 'SENTINEL-1B_OCN': ['C1327985579-ASF', 'C1216244593-ASF', 'C1234413255-ASFDEV'], + 'SENTINEL-1B_SP_META_GRD_HIGH': [ + 'C1327985619-ASF', + 'C1216244587-ASF', + 'C1234413261-ASFDEV', + ], + 'SENTINEL-1A_SP_GRD_MEDIUM': [ + 'C1214472994-ASF', + 'C1212158318-ASF', + 'C1234413241-ASFDEV', + ], + 'SENTINEL-1A_SP_META_GRD_MEDIUM': [ + 'C1214473170-ASF', + 'C1212233976-ASF', + 'C1234413244-ASFDEV', + ], + 'SENTINEL-1B_META_OCN': [ + 'C1327985646-ASF', + 'C1216244590-ASF', + 'C1234413252-ASFDEV', + ], + 'SENTINEL-1B_SP_GRD_MEDIUM': [ + 'C1327985740-ASF', + 'C1216244600-ASF', + 'C1234413259-ASFDEV', + ], + 'SENTINEL-1B_SP_META_GRD_MEDIUM': [ + 'C1327985739-ASF', + 'C1216244598-ASF', + 'C1234413262-ASFDEV', + ], + 'SENTINEL-1A_RAW': ['C1214470561-ASF', 'C1205264459-ASF', 'C1234413238-ASFDEV'], + 'SENTINEL-1A_DP_GRD_MEDIUM': [ + 'C1214471521-ASF', + 'C1212209035-ASF', + 'C1234413230-ASFDEV', + ], + 'SENTINEL-1A_SP_GRD_HIGH': [ + 'C1214470682-ASF', + 'C1212158327-ASF', + 'C1234413240-ASFDEV', + ], + 'SENTINEL-1B_RAW': ['C1327985647-ASF', 'C1216244592-ASF', 'C1234413256-ASFDEV'], + 'SENTINEL-1A_DP_GRD_FULL': [ + 'C1214471197-ASF', + 'C1212200781-ASF', + 'C1234413228-ASFDEV', + ], + 'SENTINEL-1A_DP_META_GRD_FULL': [ + 'C1214471960-ASF', + 'C1212209075-ASF', + 'C1234413231-ASFDEV', + ], + 'SENTINEL-1A_SP_GRD_FULL': ['C1214472978-ASF', 'C1234413239-ASFDEV'], + 'SENTINEL-1A_SP_META_GRD_FULL': ['C1214473165-ASF', 'C1234413242-ASFDEV'], + 'SENTINEL-1B_DP_GRD_FULL': [ + 'C1327985697-ASF', + 'C1216244597-ASF', + 'C1234413246-ASFDEV', + ], + 'SENTINEL-1B_DP_META_GRD_FULL': [ + 'C1327985651-ASF', + 'C1216244596-ASF', + 'C1234413249-ASFDEV', + ], + 'SENTINEL-1B_SP_GRD_FULL': [ + 'C1327985644-ASF', + 'C1216244588-ASF', + 'C1234413257-ASFDEV', + ], + 'SENTINEL-1B_SP_GRD_HIGH': [ + 'C1327985571-ASF', + 'C1216244586-ASF', + 'C1234413258-ASFDEV', + ], + 'SENTINEL-1B_SP_META_GRD_FULL': [ + 'C1327985674-ASF', + 'C1216244599-ASF', + 'C1234413260-ASFDEV', + ], + 'S1_Bursts': ['C1244552887-ASFDEV'], + 'SENTINEL-1_BURSTS_DEV10': ['C1257175154-ASFDEV'], + 'Sentinel-1_Burst_Map': ['C1244598379-ASFDEV'], + 'Various Browse Images': ['C1240784657-ASFDEV'], }, - "OPERA-S1": { - "OPERA_L2_CSLC-S1_V1": ["C2777443834-ASF", "C1259976861-ASF"], - "OPERA_L2_RTC-S1_V1": ["C2777436413-ASF", "C1259974840-ASF"], - "OPERA_L2_CSLC-S1-STATIC_PROVISIONAL_V0": ["C1258354200-ASF"], - "OPERA_L2_CSLC-S1-STATIC_V1": ["C1259982010-ASF", "C2795135668-ASF"], - "OPERA_L2_CSLC-S1_PROVISIONAL_V0": ["C1257995185-ASF"], - "OPERA_L2_RTC-S1-STATIC_PROVISIONAL_V0": ["C1258354201-ASF"], - "OPERA_L2_RTC-S1-STATIC_V1": ["C1259981910-ASF", "C2795135174-ASF"], - "OPERA_L2_RTC-S1_PROVISIONAL_V0": ["C1257995186-ASF"], + 'OPERA-S1': { + 'OPERA_L2_CSLC-S1_V1': ['C2777443834-ASF', 'C1259976861-ASF'], + 'OPERA_L2_RTC-S1_V1': ['C2777436413-ASF', 'C1259974840-ASF'], + 'OPERA_L2_CSLC-S1-STATIC_PROVISIONAL_V0': ['C1258354200-ASF'], + 'OPERA_L2_CSLC-S1-STATIC_V1': ['C1259982010-ASF', 
'C2795135668-ASF'], + 'OPERA_L2_CSLC-S1_PROVISIONAL_V0': ['C1257995185-ASF'], + 'OPERA_L2_RTC-S1-STATIC_PROVISIONAL_V0': ['C1258354201-ASF'], + 'OPERA_L2_RTC-S1-STATIC_V1': ['C1259981910-ASF', 'C2795135174-ASF'], + 'OPERA_L2_RTC-S1_PROVISIONAL_V0': ['C1257995186-ASF'], }, - "OPERA-S1-CALVAL": { - "OPERA_L2_CSLC-S1_CALVAL_V1": ["C1260721945-ASF", "C2803501758-ASF"], - "OPERA_L2_RTC-S1_CALVAL_V1": ["C1260721853-ASF", "C2803501097-ASF"], + 'OPERA-S1-CALVAL': { + 'OPERA_L2_CSLC-S1_CALVAL_V1': ['C1260721945-ASF', 'C2803501758-ASF'], + 'OPERA_L2_RTC-S1_CALVAL_V1': ['C1260721853-ASF', 'C2803501097-ASF'], }, - "SLC-BURST": {"SENTINEL-1_BURSTS": ["C2709161906-ASF", "C1257024016-ASF"]}, - "ALOS PALSAR": { - "ALOS_PSR_RTC_HIGH": ["C1206487504-ASF", "C1207181535-ASF"], - "ALOS_PSR_L1.5": ["C1206485940-ASF", "C1205261223-ASF"], - "ALOS_PSR_RTC_LOW": ["C1206487217-ASF", "C1208013295-ASF"], - "ALOS_PSR_KMZ": ["C1206156901-ASF", "C1207019609-ASF"], - "ALOS_PSR_L1.0": ["C1206485320-ASF"], - "ALOS_PSR_L1.1": ["C1206485527-ASF", "C1207710476-ASF", "C1239611505-ASFDEV"], - "ALOS_PSR_L2.2": ["C2011599335-ASF", "C1239927797-ASF", "C1238733834-ASFDEV"], - "ALOS_PALSAR_INSAR_METADATA": ["C1229740239-ASF"], + 'SLC-BURST': {'SENTINEL-1_BURSTS': ['C2709161906-ASF', 'C1257024016-ASF']}, + 'ALOS PALSAR': { + 'ALOS_PSR_RTC_HIGH': ['C1206487504-ASF', 'C1207181535-ASF'], + 'ALOS_PSR_L1.5': ['C1206485940-ASF', 'C1205261223-ASF'], + 'ALOS_PSR_RTC_LOW': ['C1206487217-ASF', 'C1208013295-ASF'], + 'ALOS_PSR_KMZ': ['C1206156901-ASF', 'C1207019609-ASF'], + 'ALOS_PSR_L1.0': ['C1206485320-ASF'], + 'ALOS_PSR_L1.1': ['C1206485527-ASF', 'C1207710476-ASF', 'C1239611505-ASFDEV'], + 'ALOS_PSR_L2.2': ['C2011599335-ASF', 'C1239927797-ASF', 'C1238733834-ASFDEV'], + 'ALOS_PALSAR_INSAR_METADATA': ['C1229740239-ASF'], }, - "ALOS AVNIR-2": { - "ALOS_AVNIR_OBS_ORI": [ - "C1808440897-ASF", - "C1233629671-ASF", - "C1234413224-ASFDEV", + 'ALOS AVNIR-2': { + 'ALOS_AVNIR_OBS_ORI': [ + 'C1808440897-ASF', + 'C1233629671-ASF', + 'C1234413224-ASFDEV', ], - "ALOS_AVNIR_OBS_ORI_BROWSE": ["C1234712303-ASF"], + 'ALOS_AVNIR_OBS_ORI_BROWSE': ['C1234712303-ASF'], }, - "SIR-C": { - "STS-59_BROWSE_GRD": [ - "C1661710578-ASF", - "C1226557819-ASF", - "C1234413264-ASFDEV", - ], - "STS-59_BROWSE_SLC": [ - "C1661710581-ASF", - "C1226557809-ASF", - "C1234413265-ASFDEV", - ], - "STS-59_GRD": ["C1661710583-ASF", "C1226557808-ASF", "C1234413266-ASFDEV"], - "STS-59_META_GRD": ["C1661710586-ASF", "C1226557810-ASF", "C1234413267-ASFDEV"], - "STS-59_META_SLC": ["C1661710588-ASF", "C1226557811-ASF", "C1234413268-ASFDEV"], - "STS-59_SLC": ["C1661710590-ASF", "C1226557812-ASF", "C1234413269-ASFDEV"], - "STS-68_BROWSE_GRD": [ - "C1661710593-ASF", - "C1226557813-ASF", - "C1234413270-ASFDEV", - ], - "STS-68_BROWSE_SLC": [ - "C1661710596-ASF", - "C1226557814-ASF", - "C1234413271-ASFDEV", - ], - "STS-68_GRD": ["C1661710597-ASF", "C1226557815-ASF", "C1234413272-ASFDEV"], - "STS-68_META_GRD": ["C1661710600-ASF", "C1226557816-ASF", "C1234413273-ASFDEV"], - "STS-68_META_SLC": ["C1661710603-ASF", "C1226557817-ASF", "C1234413274-ASFDEV"], - "STS-68_SLC": ["C1661710604-ASF", "C1226557818-ASF", "C1234413275-ASFDEV"], + 'SIR-C': { + 'STS-59_BROWSE_GRD': [ + 'C1661710578-ASF', + 'C1226557819-ASF', + 'C1234413264-ASFDEV', + ], + 'STS-59_BROWSE_SLC': [ + 'C1661710581-ASF', + 'C1226557809-ASF', + 'C1234413265-ASFDEV', + ], + 'STS-59_GRD': ['C1661710583-ASF', 'C1226557808-ASF', 'C1234413266-ASFDEV'], + 'STS-59_META_GRD': ['C1661710586-ASF', 'C1226557810-ASF', 'C1234413267-ASFDEV'], + 
'STS-59_META_SLC': ['C1661710588-ASF', 'C1226557811-ASF', 'C1234413268-ASFDEV'], + 'STS-59_SLC': ['C1661710590-ASF', 'C1226557812-ASF', 'C1234413269-ASFDEV'], + 'STS-68_BROWSE_GRD': [ + 'C1661710593-ASF', + 'C1226557813-ASF', + 'C1234413270-ASFDEV', + ], + 'STS-68_BROWSE_SLC': [ + 'C1661710596-ASF', + 'C1226557814-ASF', + 'C1234413271-ASFDEV', + ], + 'STS-68_GRD': ['C1661710597-ASF', 'C1226557815-ASF', 'C1234413272-ASFDEV'], + 'STS-68_META_GRD': ['C1661710600-ASF', 'C1226557816-ASF', 'C1234413273-ASFDEV'], + 'STS-68_META_SLC': ['C1661710603-ASF', 'C1226557817-ASF', 'C1234413274-ASFDEV'], + 'STS-68_SLC': ['C1661710604-ASF', 'C1226557818-ASF', 'C1234413275-ASFDEV'], }, - "ARIA S1 GUNW": { - "SENTINEL-1_INTERFEROGRAMS": ["C1595422627-ASF", "C1225776654-ASF"], - "SENTINEL-1_INTERFEROGRAMS_AMPLITUDE": ["C1596065640-ASF", "C1225776655-ASF"], - "SENTINEL-1_INTERFEROGRAMS_COHERENCE": ["C1596065639-ASF", "C1225776657-ASF"], - "SENTINEL-1_INTERFEROGRAMS_CONNECTED_COMPONENTS": [ - "C1596065641-ASF", - "C1225776658-ASF", - ], - "SENTINEL-1_INTERFEROGRAMS_UNWRAPPED_PHASE": [ - "C1595765183-ASF", - "C1225776659-ASF", - ], - "ARIA_S1_GUNW": ["C2859376221-ASF", "C1261881077-ASF"], + 'ARIA S1 GUNW': { + 'SENTINEL-1_INTERFEROGRAMS': ['C1595422627-ASF', 'C1225776654-ASF'], + 'SENTINEL-1_INTERFEROGRAMS_AMPLITUDE': ['C1596065640-ASF', 'C1225776655-ASF'], + 'SENTINEL-1_INTERFEROGRAMS_COHERENCE': ['C1596065639-ASF', 'C1225776657-ASF'], + 'SENTINEL-1_INTERFEROGRAMS_CONNECTED_COMPONENTS': [ + 'C1596065641-ASF', + 'C1225776658-ASF', + ], + 'SENTINEL-1_INTERFEROGRAMS_UNWRAPPED_PHASE': [ + 'C1595765183-ASF', + 'C1225776659-ASF', + ], + 'ARIA_S1_GUNW': ['C2859376221-ASF', 'C1261881077-ASF'], }, - "SMAP": { - "SPL1A_RO_METADATA_003": ["C1243122884-ASF", "C1233103964-ASF"], - "SPL1A_RO_QA_003": ["C1243124139-ASF", "C1216074923-ASF"], - "SPL1A_001": ["C1214473171-ASF", "C1212243761-ASF"], - "SPL1A_002": ["C1243149604-ASF", "C1213091807-ASF"], - "SPL1A_METADATA_001": ["C1214473426-ASF", "C1212243437-ASF"], - "SPL1A_METADATA_002": ["C1243119801-ASF", "C1213096699-ASF"], - "SPL1A_QA_001": ["C1214473839-ASF", "C1212249653-ASF"], - "SPL1A_QA_002": ["C1243133204-ASF", "C1213101573-ASF"], - "SPL1A_RO_001": ["C1243197402-ASF"], - "SPL1A_RO_002": ["C1243215430-ASF", "C1213136240-ASF"], - "SPL1A_RO_003": ["C1243124754-ASF", "C1216074755-ASF"], - "SPL1A_RO_METADATA_001": ["C1243141638-ASF", "C1213136752-ASF"], - "SPL1A_RO_METADATA_002": ["C1243162394-ASF", "C1213136799-ASF"], - "SPL1A_RO_QA_001": ["C1243168733-ASF", "C1213136709-ASF"], - "SPL1A_RO_QA_002": ["C1243168866-ASF", "C1213136844-ASF"], - "SPL1B_SO_LoRes_001": ["C1214473308-ASF", "C1212249811-ASF"], - "SPL1B_SO_LoRes_002": ["C1243253631-ASF", "C1213125007-ASF"], - "SPL1B_SO_LoRes_003": ["C1243133445-ASF", "C1216074919-ASF"], - "SPL1B_SO_LoRes_METADATA_001": ["C1214473550-ASF", "C1212196951-ASF"], - "SPL1B_SO_LoRes_METADATA_002": ["C1243197502-ASF", "C1213115690-ASF"], - "SPL1B_SO_LoRes_METADATA_003": ["C1243126328-ASF", "C1216074758-ASF"], - "SPL1B_SO_LoRes_QA_001": ["C1214474243-ASF", "C1212243666-ASF"], - "SPL1B_SO_LoRes_QA_002": ["C1243216659-ASF", "C1213115896-ASF"], - "SPL1B_SO_LoRes_QA_003": ["C1243129847-ASF", "C1216074761-ASF"], - "SPL1C_S0_HiRes_001": ["C1214473367-ASF", "C1212250364-ASF"], - "SPL1C_S0_HiRes_002": ["C1243268956-ASF", "C1213134622-ASF"], - "SPL1C_S0_HiRes_003": ["C1243144528-ASF", "C1216074770-ASF"], - "SPL1C_S0_HiRes_METADATA_001": ["C1214473624-ASF", "C1212246173-ASF"], - "SPL1C_S0_HiRes_METADATA_002": ["C1243228612-ASF", "C1213125156-ASF"], - 
"SPL1C_S0_HiRes_METADATA_003": ["C1243136142-ASF", "C1216074764-ASF"], - "SPL1C_S0_HiRes_QA_001": ["C1214474435-ASF", "C1212249773-ASF"], - "SPL1C_S0_HiRes_QA_002": ["C1243255360-ASF", "C1213134486-ASF"], - "SPL1C_S0_HiRes_QA_003": ["C1243140611-ASF", "C1233101609-ASF"], - "SPL1A_003": ["C1216074922-ASF"], - "SPL1A_METADATA_003": ["C1216074750-ASF"], - "SPL1A_QA_003": ["C1216074751-ASF"], + 'SMAP': { + 'SPL1A_RO_METADATA_003': ['C1243122884-ASF', 'C1233103964-ASF'], + 'SPL1A_RO_QA_003': ['C1243124139-ASF', 'C1216074923-ASF'], + 'SPL1A_001': ['C1214473171-ASF', 'C1212243761-ASF'], + 'SPL1A_002': ['C1243149604-ASF', 'C1213091807-ASF'], + 'SPL1A_METADATA_001': ['C1214473426-ASF', 'C1212243437-ASF'], + 'SPL1A_METADATA_002': ['C1243119801-ASF', 'C1213096699-ASF'], + 'SPL1A_QA_001': ['C1214473839-ASF', 'C1212249653-ASF'], + 'SPL1A_QA_002': ['C1243133204-ASF', 'C1213101573-ASF'], + 'SPL1A_RO_001': ['C1243197402-ASF'], + 'SPL1A_RO_002': ['C1243215430-ASF', 'C1213136240-ASF'], + 'SPL1A_RO_003': ['C1243124754-ASF', 'C1216074755-ASF'], + 'SPL1A_RO_METADATA_001': ['C1243141638-ASF', 'C1213136752-ASF'], + 'SPL1A_RO_METADATA_002': ['C1243162394-ASF', 'C1213136799-ASF'], + 'SPL1A_RO_QA_001': ['C1243168733-ASF', 'C1213136709-ASF'], + 'SPL1A_RO_QA_002': ['C1243168866-ASF', 'C1213136844-ASF'], + 'SPL1B_SO_LoRes_001': ['C1214473308-ASF', 'C1212249811-ASF'], + 'SPL1B_SO_LoRes_002': ['C1243253631-ASF', 'C1213125007-ASF'], + 'SPL1B_SO_LoRes_003': ['C1243133445-ASF', 'C1216074919-ASF'], + 'SPL1B_SO_LoRes_METADATA_001': ['C1214473550-ASF', 'C1212196951-ASF'], + 'SPL1B_SO_LoRes_METADATA_002': ['C1243197502-ASF', 'C1213115690-ASF'], + 'SPL1B_SO_LoRes_METADATA_003': ['C1243126328-ASF', 'C1216074758-ASF'], + 'SPL1B_SO_LoRes_QA_001': ['C1214474243-ASF', 'C1212243666-ASF'], + 'SPL1B_SO_LoRes_QA_002': ['C1243216659-ASF', 'C1213115896-ASF'], + 'SPL1B_SO_LoRes_QA_003': ['C1243129847-ASF', 'C1216074761-ASF'], + 'SPL1C_S0_HiRes_001': ['C1214473367-ASF', 'C1212250364-ASF'], + 'SPL1C_S0_HiRes_002': ['C1243268956-ASF', 'C1213134622-ASF'], + 'SPL1C_S0_HiRes_003': ['C1243144528-ASF', 'C1216074770-ASF'], + 'SPL1C_S0_HiRes_METADATA_001': ['C1214473624-ASF', 'C1212246173-ASF'], + 'SPL1C_S0_HiRes_METADATA_002': ['C1243228612-ASF', 'C1213125156-ASF'], + 'SPL1C_S0_HiRes_METADATA_003': ['C1243136142-ASF', 'C1216074764-ASF'], + 'SPL1C_S0_HiRes_QA_001': ['C1214474435-ASF', 'C1212249773-ASF'], + 'SPL1C_S0_HiRes_QA_002': ['C1243255360-ASF', 'C1213134486-ASF'], + 'SPL1C_S0_HiRes_QA_003': ['C1243140611-ASF', 'C1233101609-ASF'], + 'SPL1A_003': ['C1216074922-ASF'], + 'SPL1A_METADATA_003': ['C1216074750-ASF'], + 'SPL1A_QA_003': ['C1216074751-ASF'], }, - "UAVSAR": { - "UAVSAR_POL_META": ["C1214353986-ASF", "C1210487703-ASF"], - "UAVSAR_INSAR_META": ["C1214336717-ASF", "C1212030772-ASF"], - "UAVSAR_INSAR_INT": ["C1214336045-ASF", "C1212001698-ASF"], - "UAVSAR_INSAR_AMP": ["C1214335430-ASF", "C1206116665-ASF"], - "UAVSAR_INSAR_AMP_GRD": ["C1214335471-ASF", "C1206132445-ASF"], - "UAVSAR_INSAR_DEM": ["C1214335903-ASF", "C1211962154-ASF"], - "UAVSAR_INSAR_INT_GRD": ["C1214336154-ASF", "C1212005594-ASF"], - "UAVSAR_INSAR_KMZ": ["C1214336554-ASF", "C1212019993-ASF"], - "UAVSAR_POL_DEM": ["C1214353593-ASF", "C1207638502-ASF"], - "UAVSAR_POL_INC": ["C1214353754-ASF", "C1210025872-ASF"], - "UAVSAR_POL_KMZ": ["C1214353859-ASF", "C1210485039-ASF"], - "UAVSAR_POL_ML_CMPLX_GRD": ["C1214337770-ASF", "C1207188317-ASF"], - "UAVSAR_POL_ML_CMPLX_GRD_3X3": ["C1214354144-ASF", "C1210546638-ASF"], - "UAVSAR_POL_ML_CMPLX_GRD_5X5": ["C1214354235-ASF", 
"C1206122195-ASF"], - "UAVSAR_POL_ML_CMPLX_SLANT": ["C1214343609-ASF", "C1209970710-ASF"], - "UAVSAR_POL_PAULI": ["C1214354031-ASF", "C1207038647-ASF"], - "UAVSAR_POL_SLOPE": ["C1214408428-ASF", "C1210599503-ASF"], - "UAVSAR_POL_STOKES": ["C1214419355-ASF", "C1210599673-ASF"], + 'UAVSAR': { + 'UAVSAR_POL_META': ['C1214353986-ASF', 'C1210487703-ASF'], + 'UAVSAR_INSAR_META': ['C1214336717-ASF', 'C1212030772-ASF'], + 'UAVSAR_INSAR_INT': ['C1214336045-ASF', 'C1212001698-ASF'], + 'UAVSAR_INSAR_AMP': ['C1214335430-ASF', 'C1206116665-ASF'], + 'UAVSAR_INSAR_AMP_GRD': ['C1214335471-ASF', 'C1206132445-ASF'], + 'UAVSAR_INSAR_DEM': ['C1214335903-ASF', 'C1211962154-ASF'], + 'UAVSAR_INSAR_INT_GRD': ['C1214336154-ASF', 'C1212005594-ASF'], + 'UAVSAR_INSAR_KMZ': ['C1214336554-ASF', 'C1212019993-ASF'], + 'UAVSAR_POL_DEM': ['C1214353593-ASF', 'C1207638502-ASF'], + 'UAVSAR_POL_INC': ['C1214353754-ASF', 'C1210025872-ASF'], + 'UAVSAR_POL_KMZ': ['C1214353859-ASF', 'C1210485039-ASF'], + 'UAVSAR_POL_ML_CMPLX_GRD': ['C1214337770-ASF', 'C1207188317-ASF'], + 'UAVSAR_POL_ML_CMPLX_GRD_3X3': ['C1214354144-ASF', 'C1210546638-ASF'], + 'UAVSAR_POL_ML_CMPLX_GRD_5X5': ['C1214354235-ASF', 'C1206122195-ASF'], + 'UAVSAR_POL_ML_CMPLX_SLANT': ['C1214343609-ASF', 'C1209970710-ASF'], + 'UAVSAR_POL_PAULI': ['C1214354031-ASF', 'C1207038647-ASF'], + 'UAVSAR_POL_SLOPE': ['C1214408428-ASF', 'C1210599503-ASF'], + 'UAVSAR_POL_STOKES': ['C1214419355-ASF', 'C1210599673-ASF'], }, - "RADARSAT-1": { - "RSAT-1_L0": ["C1206897141-ASF"], - "RSAT-1_L1": ["C1206936391-ASF", "C1205181982-ASF"], - "RSAT-1_POLAR_YEAR_ANTARCTICA_L1": ["C1215670813-ASF"], - "RSAT-1_POLAR_YEAR_GREENLAND_L0": ["C1215709884-ASF"], - "RSAT-1_POLAR_YEAR_GREENLAND_L1": ["C1215709880-ASF"], - "RSAT-1_POLAR_YEAR_KAMCHATKA_L1": ["C1215714443-ASF"], - "RSAT-1_POLAR_YEAR_SEA_ICE_MIN_MAX_L1": ["C1215775284-ASF"], - "RSAT-1_POLAR_YEAR_TOOLIK_L1": ["C1215614037-ASF"], + 'RADARSAT-1': { + 'RSAT-1_L0': ['C1206897141-ASF'], + 'RSAT-1_L1': ['C1206936391-ASF', 'C1205181982-ASF'], + 'RSAT-1_POLAR_YEAR_ANTARCTICA_L1': ['C1215670813-ASF'], + 'RSAT-1_POLAR_YEAR_GREENLAND_L0': ['C1215709884-ASF'], + 'RSAT-1_POLAR_YEAR_GREENLAND_L1': ['C1215709880-ASF'], + 'RSAT-1_POLAR_YEAR_KAMCHATKA_L1': ['C1215714443-ASF'], + 'RSAT-1_POLAR_YEAR_SEA_ICE_MIN_MAX_L1': ['C1215775284-ASF'], + 'RSAT-1_POLAR_YEAR_TOOLIK_L1': ['C1215614037-ASF'], }, - "ERS": { - "ERS-1_L0": ["C1210197768-ASF", "C1205261222-ASF"], - "ERS-1_L1": ["C1211627521-ASF", "C1205302527-ASF"], - "ERS-2_L0": ["C1208794942-ASF", "C1207143701-ASF"], - "ERS-2_L1": ["C1209373626-ASF", "C1207144966-ASF"], + 'ERS': { + 'ERS-1_L0': ['C1210197768-ASF', 'C1205261222-ASF'], + 'ERS-1_L1': ['C1211627521-ASF', 'C1205302527-ASF'], + 'ERS-2_L0': ['C1208794942-ASF', 'C1207143701-ASF'], + 'ERS-2_L1': ['C1209373626-ASF', 'C1207144966-ASF'], }, - "JERS-1": { - "JERS-1_L0": ["C1208662092-ASF", "C1207175327-ASF"], - "JERS-1_L1": ["C1207933168-ASF", "C1207177736-ASF"], + 'JERS-1': { + 'JERS-1_L0': ['C1208662092-ASF', 'C1207175327-ASF'], + 'JERS-1_L1': ['C1207933168-ASF', 'C1207177736-ASF'], }, - "AIRSAR": { - "AIRSAR_POL_3FP": ["C1213921661-ASF", "C1205256880-ASF"], - "AIRSAR_INT_JPG": ["C1213921626-ASF", "C1000000306-ASF"], - "AIRSAR_POL_SYN_3FP": ["C1213928843-ASF", "C1208713702-ASF"], - "AIRSAR_TOP_C-DEM_STOKES": ["C1213927035-ASF", "C1208707768-ASF"], - "AIRSAR_TOP_DEM": ["C179001730-ASF", "C1208655639-ASF"], - "AIRSAR_TOP_DEM_C": ["C1213925022-ASF", "C1208680681-ASF"], - "AIRSAR_TOP_DEM_L": ["C1213926419-ASF", "C1208691361-ASF"], - "AIRSAR_TOP_DEM_P": 
["C1213926777-ASF", "C1208703384-ASF"], - "AIRSAR_TOP_L-STOKES": ["C1213927939-ASF"], - "AIRSAR_TOP_P-STOKES": ["C1213928209-ASF"], - "AIRSAR_INT": ["C1208652494-ASF"], + 'AIRSAR': { + 'AIRSAR_POL_3FP': ['C1213921661-ASF', 'C1205256880-ASF'], + 'AIRSAR_INT_JPG': ['C1213921626-ASF', 'C1000000306-ASF'], + 'AIRSAR_POL_SYN_3FP': ['C1213928843-ASF', 'C1208713702-ASF'], + 'AIRSAR_TOP_C-DEM_STOKES': ['C1213927035-ASF', 'C1208707768-ASF'], + 'AIRSAR_TOP_DEM': ['C179001730-ASF', 'C1208655639-ASF'], + 'AIRSAR_TOP_DEM_C': ['C1213925022-ASF', 'C1208680681-ASF'], + 'AIRSAR_TOP_DEM_L': ['C1213926419-ASF', 'C1208691361-ASF'], + 'AIRSAR_TOP_DEM_P': ['C1213926777-ASF', 'C1208703384-ASF'], + 'AIRSAR_TOP_L-STOKES': ['C1213927939-ASF'], + 'AIRSAR_TOP_P-STOKES': ['C1213928209-ASF'], + 'AIRSAR_INT': ['C1208652494-ASF'], }, - "SEASAT": { - "SEASAT_SAR_L1_TIFF": ["C1206500826-ASF", "C1206752770-ASF"], - "SEASAT_SAR_L1_HDF5": ["C1206500991-ASF", "C1206144699-ASF"], + 'SEASAT': { + 'SEASAT_SAR_L1_TIFF': ['C1206500826-ASF', 'C1206752770-ASF'], + 'SEASAT_SAR_L1_HDF5': ['C1206500991-ASF', 'C1206144699-ASF'], }, } collections_per_platform = { - "SENTINEL-1A": [ - "C2803501758-ASF", - "C2803501097-ASF", - "C1214470488-ASF", - "C1214470533-ASF", - "C1214470576-ASF", - "C1595422627-ASF", - "C2859376221-ASF", - "C1261881077-ASF", - "C1214470496-ASF", - "C1214470532-ASF", - "C1214472977-ASF", - "C1214472336-ASF", - "C1266376001-ASF", - "C1214472994-ASF", - "C1214470732-ASF", - "C1214473170-ASF", - "C1214470561-ASF", - "C1214471521-ASF", - "C1214470682-ASF", - "C2777443834-ASF", - "C2777436413-ASF", - "C1214471197-ASF", - "C1214471960-ASF", - "C1214472978-ASF", - "C1214473165-ASF", - "C2709161906-ASF", - "C1596065640-ASF", - "C1596065639-ASF", - "C1596065641-ASF", - "C1595765183-ASF", - "C2450786986-ASF", - "C1205428742-ASF", - "C1212201032-ASF", - "C1212212560-ASF", - "C1205264459-ASF", - "C1208117434-ASF", - "C1212209035-ASF", - "C1212209226-ASF", - "C1208115009-ASF", - "C1212158327-ASF", - "C1215704763-ASF", - "C1225776654-ASF", - "C1212158318-ASF", - "C1212212493-ASF", - "C1212158326-ASF", - "C1212233976-ASF", - "C1260726384-ASF", - "C1258354200-ASF", - "C1259982010-ASF", - "C2795135668-ASF", - "C1260721945-ASF", - "C1257995185-ASF", - "C1259976861-ASF", - "C1258354201-ASF", - "C1259981910-ASF", - "C2795135174-ASF", - "C1260721853-ASF", - "C1257995186-ASF", - "C1259974840-ASF", - "C1212200781-ASF", - "C1212209075-ASF", - "C1257024016-ASF", - "C1225776655-ASF", - "C1225776657-ASF", - "C1225776658-ASF", - "C1225776659-ASF", - "C1245953394-ASF", - "C1234413245-ASFDEV", - "C1234413229-ASFDEV", - "C1234413237-ASFDEV", - "C1234413238-ASFDEV", - "C1234413236-ASFDEV", - "C1234413230-ASFDEV", - "C1234413232-ASFDEV", - "C1234413235-ASFDEV", - "C1234413240-ASFDEV", - "C1234413234-ASFDEV", - "C1234413241-ASFDEV", - "C1234413233-ASFDEV", - "C1234413243-ASFDEV", - "C1234413244-ASFDEV", - "C1244552887-ASFDEV", - "C1234413228-ASFDEV", - "C1234413231-ASFDEV", - "C1234413239-ASFDEV", - "C1234413242-ASFDEV", - "C1257175154-ASFDEV", - "C1244598379-ASFDEV", - "C1240784657-ASFDEV", + 'SENTINEL-1A': [ + 'C2803501758-ASF', + 'C2803501097-ASF', + 'C1214470488-ASF', + 'C1214470533-ASF', + 'C1214470576-ASF', + 'C1595422627-ASF', + 'C2859376221-ASF', + 'C1261881077-ASF', + 'C1214470496-ASF', + 'C1214470532-ASF', + 'C1214472977-ASF', + 'C1214472336-ASF', + 'C1266376001-ASF', + 'C1214472994-ASF', + 'C1214470732-ASF', + 'C1214473170-ASF', + 'C1214470561-ASF', + 'C1214471521-ASF', + 'C1214470682-ASF', + 'C2777443834-ASF', + 'C2777436413-ASF', + 
'C1214471197-ASF', + 'C1214471960-ASF', + 'C1214472978-ASF', + 'C1214473165-ASF', + 'C2709161906-ASF', + 'C1596065640-ASF', + 'C1596065639-ASF', + 'C1596065641-ASF', + 'C1595765183-ASF', + 'C2450786986-ASF', + 'C1205428742-ASF', + 'C1212201032-ASF', + 'C1212212560-ASF', + 'C1205264459-ASF', + 'C1208117434-ASF', + 'C1212209035-ASF', + 'C1212209226-ASF', + 'C1208115009-ASF', + 'C1212158327-ASF', + 'C1215704763-ASF', + 'C1225776654-ASF', + 'C1212158318-ASF', + 'C1212212493-ASF', + 'C1212158326-ASF', + 'C1212233976-ASF', + 'C1260726384-ASF', + 'C1258354200-ASF', + 'C1259982010-ASF', + 'C2795135668-ASF', + 'C1260721945-ASF', + 'C1257995185-ASF', + 'C1259976861-ASF', + 'C1258354201-ASF', + 'C1259981910-ASF', + 'C2795135174-ASF', + 'C1260721853-ASF', + 'C1257995186-ASF', + 'C1259974840-ASF', + 'C1212200781-ASF', + 'C1212209075-ASF', + 'C1257024016-ASF', + 'C1225776655-ASF', + 'C1225776657-ASF', + 'C1225776658-ASF', + 'C1225776659-ASF', + 'C1245953394-ASF', + 'C1234413245-ASFDEV', + 'C1234413229-ASFDEV', + 'C1234413237-ASFDEV', + 'C1234413238-ASFDEV', + 'C1234413236-ASFDEV', + 'C1234413230-ASFDEV', + 'C1234413232-ASFDEV', + 'C1234413235-ASFDEV', + 'C1234413240-ASFDEV', + 'C1234413234-ASFDEV', + 'C1234413241-ASFDEV', + 'C1234413233-ASFDEV', + 'C1234413243-ASFDEV', + 'C1234413244-ASFDEV', + 'C1244552887-ASFDEV', + 'C1234413228-ASFDEV', + 'C1234413231-ASFDEV', + 'C1234413239-ASFDEV', + 'C1234413242-ASFDEV', + 'C1257175154-ASFDEV', + 'C1244598379-ASFDEV', + 'C1240784657-ASFDEV', ], - "SENTINEL-1B": [ - "C2803501758-ASF", - "C2803501097-ASF", - "C1327985661-ASF", - "C1327985645-ASF", - "C1595422627-ASF", - "C1327985617-ASF", - "C1327985660-ASF", - "C1327985741-ASF", - "C1327985578-ASF", - "C1327985646-ASF", - "C1327985650-ASF", - "C1327985579-ASF", - "C1327985740-ASF", - "C1327985619-ASF", - "C1327985739-ASF", - "C1327985647-ASF", - "C2777443834-ASF", - "C2777436413-ASF", - "C1327985697-ASF", - "C1327985651-ASF", - "C1327985644-ASF", - "C1327985571-ASF", - "C1327985674-ASF", - "C2709161906-ASF", - "C1596065640-ASF", - "C1596065639-ASF", - "C1596065641-ASF", - "C1595765183-ASF", - "C2450786986-ASF", - "C1216244348-ASF", - "C1216244589-ASF", - "C1216244594-ASF", - "C1216244593-ASF", - "C1216244585-ASF", - "C1216244592-ASF", - "C1216244595-ASF", - "C1225776654-ASF", - "C1216244590-ASF", - "C1216244601-ASF", - "C1216244600-ASF", - "C1216244591-ASF", - "C1216244587-ASF", - "C1216244598-ASF", - "C1216244586-ASF", - "C1260726384-ASF", - "C1258354200-ASF", - "C1259982010-ASF", - "C2795135668-ASF", - "C1260721945-ASF", - "C1257995185-ASF", - "C1259976861-ASF", - "C1258354201-ASF", - "C1259981910-ASF", - "C2795135174-ASF", - "C1260721853-ASF", - "C1257995186-ASF", - "C1259974840-ASF", - "C1216244597-ASF", - "C1216244596-ASF", - "C1216244588-ASF", - "C1216244599-ASF", - "C1257024016-ASF", - "C1225776655-ASF", - "C1225776657-ASF", - "C1225776658-ASF", - "C1225776659-ASF", - "C1245953394-ASF", - "C1234413263-ASFDEV", - "C1234413247-ASFDEV", - "C1234413248-ASFDEV", - "C1234413255-ASFDEV", - "C1234413254-ASFDEV", - "C1234413256-ASFDEV", - "C1234413253-ASFDEV", - "C1234413252-ASFDEV", - "C1234413250-ASFDEV", - "C1234413259-ASFDEV", - "C1234413251-ASFDEV", - "C1234413261-ASFDEV", - "C1234413262-ASFDEV", - "C1234413258-ASFDEV", - "C1244552887-ASFDEV", - "C1234413246-ASFDEV", - "C1234413249-ASFDEV", - "C1234413257-ASFDEV", - "C1234413260-ASFDEV", - "C1257175154-ASFDEV", - "C1244598379-ASFDEV", + 'SENTINEL-1B': [ + 'C2803501758-ASF', + 'C2803501097-ASF', + 'C1327985661-ASF', + 'C1327985645-ASF', + 'C1595422627-ASF', + 
'C1327985617-ASF', + 'C1327985660-ASF', + 'C1327985741-ASF', + 'C1327985578-ASF', + 'C1327985646-ASF', + 'C1327985650-ASF', + 'C1327985579-ASF', + 'C1327985740-ASF', + 'C1327985619-ASF', + 'C1327985739-ASF', + 'C1327985647-ASF', + 'C2777443834-ASF', + 'C2777436413-ASF', + 'C1327985697-ASF', + 'C1327985651-ASF', + 'C1327985644-ASF', + 'C1327985571-ASF', + 'C1327985674-ASF', + 'C2709161906-ASF', + 'C1596065640-ASF', + 'C1596065639-ASF', + 'C1596065641-ASF', + 'C1595765183-ASF', + 'C2450786986-ASF', + 'C1216244348-ASF', + 'C1216244589-ASF', + 'C1216244594-ASF', + 'C1216244593-ASF', + 'C1216244585-ASF', + 'C1216244592-ASF', + 'C1216244595-ASF', + 'C1225776654-ASF', + 'C1216244590-ASF', + 'C1216244601-ASF', + 'C1216244600-ASF', + 'C1216244591-ASF', + 'C1216244587-ASF', + 'C1216244598-ASF', + 'C1216244586-ASF', + 'C1260726384-ASF', + 'C1258354200-ASF', + 'C1259982010-ASF', + 'C2795135668-ASF', + 'C1260721945-ASF', + 'C1257995185-ASF', + 'C1259976861-ASF', + 'C1258354201-ASF', + 'C1259981910-ASF', + 'C2795135174-ASF', + 'C1260721853-ASF', + 'C1257995186-ASF', + 'C1259974840-ASF', + 'C1216244597-ASF', + 'C1216244596-ASF', + 'C1216244588-ASF', + 'C1216244599-ASF', + 'C1257024016-ASF', + 'C1225776655-ASF', + 'C1225776657-ASF', + 'C1225776658-ASF', + 'C1225776659-ASF', + 'C1245953394-ASF', + 'C1234413263-ASFDEV', + 'C1234413247-ASFDEV', + 'C1234413248-ASFDEV', + 'C1234413255-ASFDEV', + 'C1234413254-ASFDEV', + 'C1234413256-ASFDEV', + 'C1234413253-ASFDEV', + 'C1234413252-ASFDEV', + 'C1234413250-ASFDEV', + 'C1234413259-ASFDEV', + 'C1234413251-ASFDEV', + 'C1234413261-ASFDEV', + 'C1234413262-ASFDEV', + 'C1234413258-ASFDEV', + 'C1244552887-ASFDEV', + 'C1234413246-ASFDEV', + 'C1234413249-ASFDEV', + 'C1234413257-ASFDEV', + 'C1234413260-ASFDEV', + 'C1257175154-ASFDEV', + 'C1244598379-ASFDEV', ], - "STS-59": [ - "C1661710578-ASF", - "C1661710581-ASF", - "C1661710583-ASF", - "C1661710586-ASF", - "C1661710588-ASF", - "C1661710590-ASF", - "C1226557819-ASF", - "C1226557809-ASF", - "C1226557808-ASF", - "C1226557810-ASF", - "C1226557811-ASF", - "C1226557812-ASF", - "C1234413264-ASFDEV", - "C1234413265-ASFDEV", - "C1234413266-ASFDEV", - "C1234413267-ASFDEV", - "C1234413268-ASFDEV", - "C1234413269-ASFDEV", + 'STS-59': [ + 'C1661710578-ASF', + 'C1661710581-ASF', + 'C1661710583-ASF', + 'C1661710586-ASF', + 'C1661710588-ASF', + 'C1661710590-ASF', + 'C1226557819-ASF', + 'C1226557809-ASF', + 'C1226557808-ASF', + 'C1226557810-ASF', + 'C1226557811-ASF', + 'C1226557812-ASF', + 'C1234413264-ASFDEV', + 'C1234413265-ASFDEV', + 'C1234413266-ASFDEV', + 'C1234413267-ASFDEV', + 'C1234413268-ASFDEV', + 'C1234413269-ASFDEV', ], - "STS-68": [ - "C1661710593-ASF", - "C1661710596-ASF", - "C1661710597-ASF", - "C1661710600-ASF", - "C1661710603-ASF", - "C1661710604-ASF", - "C1226557813-ASF", - "C1226557814-ASF", - "C1226557815-ASF", - "C1226557816-ASF", - "C1226557817-ASF", - "C1226557818-ASF", - "C1234413270-ASFDEV", - "C1234413271-ASFDEV", - "C1234413272-ASFDEV", - "C1234413273-ASFDEV", - "C1234413274-ASFDEV", - "C1234413275-ASFDEV", + 'STS-68': [ + 'C1661710593-ASF', + 'C1661710596-ASF', + 'C1661710597-ASF', + 'C1661710600-ASF', + 'C1661710603-ASF', + 'C1661710604-ASF', + 'C1226557813-ASF', + 'C1226557814-ASF', + 'C1226557815-ASF', + 'C1226557816-ASF', + 'C1226557817-ASF', + 'C1226557818-ASF', + 'C1234413270-ASFDEV', + 'C1234413271-ASFDEV', + 'C1234413272-ASFDEV', + 'C1234413273-ASFDEV', + 'C1234413274-ASFDEV', + 'C1234413275-ASFDEV', ], - "ALOS": [ - "C1206487504-ASF", - "C1206487217-ASF", - "C1206485940-ASF", - "C1206156901-ASF", - 
"C1206485320-ASF", - "C1206485527-ASF", - "C1808440897-ASF", - "C2011599335-ASF", - "C1207181535-ASF", - "C1207710476-ASF", - "C1234712303-ASF", - "C1239927797-ASF", - "C1205261223-ASF", - "C1233629671-ASF", - "C1208013295-ASF", - "C1207019609-ASF", - "C1229740239-ASF", - "C1239611505-ASFDEV", - "C1238733834-ASFDEV", - "C1234413224-ASFDEV", + 'ALOS': [ + 'C1206487504-ASF', + 'C1206487217-ASF', + 'C1206485940-ASF', + 'C1206156901-ASF', + 'C1206485320-ASF', + 'C1206485527-ASF', + 'C1808440897-ASF', + 'C2011599335-ASF', + 'C1207181535-ASF', + 'C1207710476-ASF', + 'C1234712303-ASF', + 'C1239927797-ASF', + 'C1205261223-ASF', + 'C1233629671-ASF', + 'C1208013295-ASF', + 'C1207019609-ASF', + 'C1229740239-ASF', + 'C1239611505-ASFDEV', + 'C1238733834-ASFDEV', + 'C1234413224-ASFDEV', ], - "ERS-1": [ - "C1210197768-ASF", - "C1211627521-ASF", - "C1205261222-ASF", - "C1205302527-ASF", + 'ERS-1': [ + 'C1210197768-ASF', + 'C1211627521-ASF', + 'C1205261222-ASF', + 'C1205302527-ASF', ], - "ERS-2": [ - "C1208794942-ASF", - "C1209373626-ASF", - "C1207143701-ASF", - "C1207144966-ASF", + 'ERS-2': [ + 'C1208794942-ASF', + 'C1209373626-ASF', + 'C1207143701-ASF', + 'C1207144966-ASF', ], - "JERS-1": [ - "C1208662092-ASF", - "C1207933168-ASF", - "C1207175327-ASF", - "C1207177736-ASF", + 'JERS-1': [ + 'C1208662092-ASF', + 'C1207933168-ASF', + 'C1207175327-ASF', + 'C1207177736-ASF', ], - "RADARSAT-1": [ - "C1206897141-ASF", - "C1206936391-ASF", - "C1205181982-ASF", - "C1215670813-ASF", - "C1215709884-ASF", - "C1215709880-ASF", - "C1215714443-ASF", - "C1215775284-ASF", - "C1215614037-ASF", + 'RADARSAT-1': [ + 'C1206897141-ASF', + 'C1206936391-ASF', + 'C1205181982-ASF', + 'C1215670813-ASF', + 'C1215709884-ASF', + 'C1215709880-ASF', + 'C1215714443-ASF', + 'C1215775284-ASF', + 'C1215614037-ASF', ], - "DC-8": [ - "C1213921661-ASF", - "C1213921626-ASF", - "C1213928843-ASF", - "C1213927035-ASF", - "C179001730-ASF", - "C1213925022-ASF", - "C1213926419-ASF", - "C1213926777-ASF", - "C1213927939-ASF", - "C1213928209-ASF", - "C1205256880-ASF", - "C1208652494-ASF", - "C1000000306-ASF", - "C1208713702-ASF", - "C1208707768-ASF", - "C1208655639-ASF", - "C1208680681-ASF", - "C1208691361-ASF", - "C1208703384-ASF", + 'DC-8': [ + 'C1213921661-ASF', + 'C1213921626-ASF', + 'C1213928843-ASF', + 'C1213927035-ASF', + 'C179001730-ASF', + 'C1213925022-ASF', + 'C1213926419-ASF', + 'C1213926777-ASF', + 'C1213927939-ASF', + 'C1213928209-ASF', + 'C1205256880-ASF', + 'C1208652494-ASF', + 'C1000000306-ASF', + 'C1208713702-ASF', + 'C1208707768-ASF', + 'C1208655639-ASF', + 'C1208680681-ASF', + 'C1208691361-ASF', + 'C1208703384-ASF', ], - "SEASAT 1": [ - "C1206500826-ASF", - "C1206500991-ASF", - "C1206752770-ASF", - "C1206144699-ASF", + 'SEASAT 1': [ + 'C1206500826-ASF', + 'C1206500991-ASF', + 'C1206752770-ASF', + 'C1206144699-ASF', ], - "SMAP": [ - "C1243122884-ASF", - "C1243124139-ASF", - "C1214473171-ASF", - "C1243149604-ASF", - "C1214473426-ASF", - "C1243119801-ASF", - "C1214473839-ASF", - "C1243133204-ASF", - "C1243197402-ASF", - "C1243215430-ASF", - "C1243124754-ASF", - "C1243141638-ASF", - "C1243162394-ASF", - "C1243168733-ASF", - "C1243168866-ASF", - "C1214473308-ASF", - "C1243253631-ASF", - "C1243133445-ASF", - "C1214473550-ASF", - "C1243197502-ASF", - "C1243126328-ASF", - "C1214474243-ASF", - "C1243216659-ASF", - "C1243129847-ASF", - "C1214473367-ASF", - "C1243268956-ASF", - "C1243144528-ASF", - "C1214473624-ASF", - "C1243228612-ASF", - "C1243136142-ASF", - "C1214474435-ASF", - "C1243255360-ASF", - "C1243140611-ASF", - "C1233103964-ASF", - 
"C1216074923-ASF", - "C1212243761-ASF", - "C1213091807-ASF", - "C1216074922-ASF", - "C1212243437-ASF", - "C1213096699-ASF", - "C1216074750-ASF", - "C1212249653-ASF", - "C1213101573-ASF", - "C1216074751-ASF", - "C1213136240-ASF", - "C1216074755-ASF", - "C1213136752-ASF", - "C1213136799-ASF", - "C1213136709-ASF", - "C1213136844-ASF", - "C1212249811-ASF", - "C1213125007-ASF", - "C1216074919-ASF", - "C1212196951-ASF", - "C1213115690-ASF", - "C1216074758-ASF", - "C1212243666-ASF", - "C1213115896-ASF", - "C1216074761-ASF", - "C1212250364-ASF", - "C1213134622-ASF", - "C1216074770-ASF", - "C1212246173-ASF", - "C1213125156-ASF", - "C1216074764-ASF", - "C1212249773-ASF", - "C1213134486-ASF", - "C1233101609-ASF", + 'SMAP': [ + 'C1243122884-ASF', + 'C1243124139-ASF', + 'C1214473171-ASF', + 'C1243149604-ASF', + 'C1214473426-ASF', + 'C1243119801-ASF', + 'C1214473839-ASF', + 'C1243133204-ASF', + 'C1243197402-ASF', + 'C1243215430-ASF', + 'C1243124754-ASF', + 'C1243141638-ASF', + 'C1243162394-ASF', + 'C1243168733-ASF', + 'C1243168866-ASF', + 'C1214473308-ASF', + 'C1243253631-ASF', + 'C1243133445-ASF', + 'C1214473550-ASF', + 'C1243197502-ASF', + 'C1243126328-ASF', + 'C1214474243-ASF', + 'C1243216659-ASF', + 'C1243129847-ASF', + 'C1214473367-ASF', + 'C1243268956-ASF', + 'C1243144528-ASF', + 'C1214473624-ASF', + 'C1243228612-ASF', + 'C1243136142-ASF', + 'C1214474435-ASF', + 'C1243255360-ASF', + 'C1243140611-ASF', + 'C1233103964-ASF', + 'C1216074923-ASF', + 'C1212243761-ASF', + 'C1213091807-ASF', + 'C1216074922-ASF', + 'C1212243437-ASF', + 'C1213096699-ASF', + 'C1216074750-ASF', + 'C1212249653-ASF', + 'C1213101573-ASF', + 'C1216074751-ASF', + 'C1213136240-ASF', + 'C1216074755-ASF', + 'C1213136752-ASF', + 'C1213136799-ASF', + 'C1213136709-ASF', + 'C1213136844-ASF', + 'C1212249811-ASF', + 'C1213125007-ASF', + 'C1216074919-ASF', + 'C1212196951-ASF', + 'C1213115690-ASF', + 'C1216074758-ASF', + 'C1212243666-ASF', + 'C1213115896-ASF', + 'C1216074761-ASF', + 'C1212250364-ASF', + 'C1213134622-ASF', + 'C1216074770-ASF', + 'C1212246173-ASF', + 'C1213125156-ASF', + 'C1216074764-ASF', + 'C1212249773-ASF', + 'C1213134486-ASF', + 'C1233101609-ASF', ], - "G-III": [ - "C1214353986-ASF", - "C1214336045-ASF", - "C1214336717-ASF", - "C1214335430-ASF", - "C1214335471-ASF", - "C1214335903-ASF", - "C1214336154-ASF", - "C1214336554-ASF", - "C1214353593-ASF", - "C1214353754-ASF", - "C1214353859-ASF", - "C1214337770-ASF", - "C1214354144-ASF", - "C1214354235-ASF", - "C1214343609-ASF", - "C1214354031-ASF", - "C1214408428-ASF", - "C1214419355-ASF", - "C1210487703-ASF", - "C1212030772-ASF", - "C1206116665-ASF", - "C1206132445-ASF", - "C1211962154-ASF", - "C1212001698-ASF", - "C1212005594-ASF", - "C1212019993-ASF", - "C1207638502-ASF", - "C1210025872-ASF", - "C1210485039-ASF", - "C1207188317-ASF", - "C1210546638-ASF", - "C1206122195-ASF", - "C1209970710-ASF", - "C1207038647-ASF", - "C1210599503-ASF", - "C1210599673-ASF", + 'G-III': [ + 'C1214353986-ASF', + 'C1214336045-ASF', + 'C1214336717-ASF', + 'C1214335430-ASF', + 'C1214335471-ASF', + 'C1214335903-ASF', + 'C1214336154-ASF', + 'C1214336554-ASF', + 'C1214353593-ASF', + 'C1214353754-ASF', + 'C1214353859-ASF', + 'C1214337770-ASF', + 'C1214354144-ASF', + 'C1214354235-ASF', + 'C1214343609-ASF', + 'C1214354031-ASF', + 'C1214408428-ASF', + 'C1214419355-ASF', + 'C1210487703-ASF', + 'C1212030772-ASF', + 'C1206116665-ASF', + 'C1206132445-ASF', + 'C1211962154-ASF', + 'C1212001698-ASF', + 'C1212005594-ASF', + 'C1212019993-ASF', + 'C1207638502-ASF', + 'C1210025872-ASF', + 'C1210485039-ASF', + 
'C1207188317-ASF', + 'C1210546638-ASF', + 'C1206122195-ASF', + 'C1209970710-ASF', + 'C1207038647-ASF', + 'C1210599503-ASF', + 'C1210599673-ASF', ], - "NISAR": [ + 'NISAR': [ # UAT ASFDEV - "C1261815181-ASFDEV", - "C1261832381-ASFDEV", - "C1256533420-ASFDEV", - "C1261813453-ASFDEV", - "C1261832466-ASFDEV", - "C1256524081-ASFDEV", - "C1261815274-ASFDEV", - "C1261832497-ASFDEV", - "C1256358262-ASFDEV", - "C1261815276-ASFDEV", - "C1261832632-ASFDEV", - "C1256358463-ASFDEV", - "C1261813489-ASFDEV", - "C1261832868-ASFDEV", - "C1256363301-ASFDEV", - "C1261819086-ASFDEV", - "C1261832940-ASFDEV", - "C1256381769-ASFDEV", - "C1261819098-ASFDEV", - "C1261832990-ASFDEV", - "C1256420738-ASFDEV", - "C1261819110-ASFDEV", - "C1261832993-ASFDEV", - "C1256411631-ASFDEV", - "C1261819167-ASFDEV", - "C1261833024-ASFDEV", - "C1256413628-ASFDEV", - "C1261819168-ASFDEV", - "C1261833025-ASFDEV", - "C1256432264-ASFDEV", - "C1261819211-ASFDEV", - "C1261833026-ASFDEV", - "C1256477304-ASFDEV", - "C1261819233-ASFDEV", - "C1261833027-ASFDEV", - "C1256479237-ASFDEV", - "C1261819245-ASFDEV", - "C1261833050-ASFDEV", - "C1256568692-ASFDEV", - "C1262134528-ASFDEV", + 'C1261815181-ASFDEV', + 'C1261832381-ASFDEV', + 'C1256533420-ASFDEV', + 'C1261813453-ASFDEV', + 'C1261832466-ASFDEV', + 'C1256524081-ASFDEV', + 'C1261815274-ASFDEV', + 'C1261832497-ASFDEV', + 'C1256358262-ASFDEV', + 'C1261815276-ASFDEV', + 'C1261832632-ASFDEV', + 'C1256358463-ASFDEV', + 'C1261813489-ASFDEV', + 'C1261832868-ASFDEV', + 'C1256363301-ASFDEV', + 'C1261819086-ASFDEV', + 'C1261832940-ASFDEV', + 'C1256381769-ASFDEV', + 'C1261819098-ASFDEV', + 'C1261832990-ASFDEV', + 'C1256420738-ASFDEV', + 'C1261819110-ASFDEV', + 'C1261832993-ASFDEV', + 'C1256411631-ASFDEV', + 'C1261819167-ASFDEV', + 'C1261833024-ASFDEV', + 'C1256413628-ASFDEV', + 'C1261819168-ASFDEV', + 'C1261833025-ASFDEV', + 'C1256432264-ASFDEV', + 'C1261819211-ASFDEV', + 'C1261833026-ASFDEV', + 'C1256477304-ASFDEV', + 'C1261819233-ASFDEV', + 'C1261833027-ASFDEV', + 'C1256479237-ASFDEV', + 'C1261819245-ASFDEV', + 'C1261833050-ASFDEV', + 'C1256568692-ASFDEV', + 'C1262134528-ASFDEV', # UAT - "C1261815288-ASF", - "C1261832657-ASF", - "C1257349121-ASF", - "C1261815147-ASF", - "C1261832658-ASF", - "C1257349120-ASF", - "C1261815289-ASF", - "C1261832659-ASF", - "C1257349115-ASF", - "C1261815301-ASF", - "C1261832671-ASF", - "C1257349114-ASF", - "C1261815148-ASF", - "C1261833052-ASF", - "C1257349109-ASF", - "C1261819120-ASF", - "C1261833063-ASF", - "C1257349108-ASF", - "C1261819121-ASF", - "C1261833064-ASF", - "C1257349107-ASF", - "C1261819145-ASF", - "C1261833076-ASF", - "C1257349103-ASF", - "C1261819258-ASF", - "C1261833127-ASF", - "C1257349102-ASF", - "C1261819270-ASF", - "C1261846741-ASF", - "C1257349096-ASF", - "C1261819275-ASF", - "C1261846880-ASF", - "C1257349095-ASF", - "C1261819281-ASF", - "C1261846994-ASF", - "C1257349094-ASF", - "C1261819282-ASF", - "C1261847095-ASF", - "C1257349093-ASF", - "C1262135006-ASF", + 'C1261815288-ASF', + 'C1261832657-ASF', + 'C1257349121-ASF', + 'C1261815147-ASF', + 'C1261832658-ASF', + 'C1257349120-ASF', + 'C1261815289-ASF', + 'C1261832659-ASF', + 'C1257349115-ASF', + 'C1261815301-ASF', + 'C1261832671-ASF', + 'C1257349114-ASF', + 'C1261815148-ASF', + 'C1261833052-ASF', + 'C1257349109-ASF', + 'C1261819120-ASF', + 'C1261833063-ASF', + 'C1257349108-ASF', + 'C1261819121-ASF', + 'C1261833064-ASF', + 'C1257349107-ASF', + 'C1261819145-ASF', + 'C1261833076-ASF', + 'C1257349103-ASF', + 'C1261819258-ASF', + 'C1261833127-ASF', + 'C1257349102-ASF', + 'C1261819270-ASF', + 
'C1261846741-ASF', + 'C1257349096-ASF', + 'C1261819275-ASF', + 'C1261846880-ASF', + 'C1257349095-ASF', + 'C1261819281-ASF', + 'C1261846994-ASF', + 'C1257349094-ASF', + 'C1261819282-ASF', + 'C1261847095-ASF', + 'C1257349093-ASF', + 'C1262135006-ASF', # PROD - "C2850220296-ASF", - "C2853068083-ASF", - "C2727902012-ASF", - "C2850223384-ASF", - "C2853086824-ASF", - "C2727901263-ASF", - "C2850224301-ASF", - "C2853089814-ASF", - "C2727901639-ASF", - "C2850225137-ASF", - "C2853091612-ASF", - "C2727901523-ASF", - "C2850225585-ASF", - "C2853145197-ASF", - "C2727900439-ASF", - "C2850234202-ASF", - "C2853147928-ASF", - "C2723110181-ASF", - "C2850235455-ASF", - "C2853153429-ASF", - "C2727900827-ASF", - "C2850237619-ASF", - "C2853156054-ASF", - "C2727900080-ASF", - "C2850259510-ASF", - "C2854332392-ASF", - "C2727896667-ASF", - "C2850261892-ASF", - "C2854335566-ASF", - "C2727897718-ASF", - "C2850262927-ASF", - "C2854338529-ASF", - "C2727896018-ASF", - "C2850263910-ASF", - "C2854341702-ASF", - "C2727896460-ASF", - "C2850265000-ASF", - "C2854344945-ASF", - "C2727894546-ASF", - "C2874824964-ASF", + 'C2850220296-ASF', + 'C2853068083-ASF', + 'C2727902012-ASF', + 'C2850223384-ASF', + 'C2853086824-ASF', + 'C2727901263-ASF', + 'C2850224301-ASF', + 'C2853089814-ASF', + 'C2727901639-ASF', + 'C2850225137-ASF', + 'C2853091612-ASF', + 'C2727901523-ASF', + 'C2850225585-ASF', + 'C2853145197-ASF', + 'C2727900439-ASF', + 'C2850234202-ASF', + 'C2853147928-ASF', + 'C2723110181-ASF', + 'C2850235455-ASF', + 'C2853153429-ASF', + 'C2727900827-ASF', + 'C2850237619-ASF', + 'C2853156054-ASF', + 'C2727900080-ASF', + 'C2850259510-ASF', + 'C2854332392-ASF', + 'C2727896667-ASF', + 'C2850261892-ASF', + 'C2854335566-ASF', + 'C2727897718-ASF', + 'C2850262927-ASF', + 'C2854338529-ASF', + 'C2727896018-ASF', + 'C2850263910-ASF', + 'C2854341702-ASF', + 'C2727896460-ASF', + 'C2850265000-ASF', + 'C2854344945-ASF', + 'C2727894546-ASF', + 'C2874824964-ASF', ], } collections_by_processing_level = { - "SLC": [ - "C1214470488-ASF", - "C1205428742-ASF", - "C1234413245-ASFDEV", - "C1327985661-ASF", - "C1216244348-ASF", - "C1234413263-ASFDEV", - "C1661710588-ASF", - "C1661710590-ASF", - "C1226557811-ASF", - "C1226557812-ASF", - "C1661710603-ASF", - "C1661710604-ASF", - "C1226557817-ASF", - "C1226557818-ASF", + 'SLC': [ + 'C1214470488-ASF', + 'C1205428742-ASF', + 'C1234413245-ASFDEV', + 'C1327985661-ASF', + 'C1216244348-ASF', + 'C1234413263-ASFDEV', + 'C1661710588-ASF', + 'C1661710590-ASF', + 'C1226557811-ASF', + 'C1226557812-ASF', + 'C1661710603-ASF', + 'C1661710604-ASF', + 'C1226557817-ASF', + 'C1226557818-ASF', ], - "GRD_HD": [ - "C1214470533-ASF", - "C1212201032-ASF", - "C1234413229-ASFDEV", - "C1327985645-ASF", - "C1216244589-ASF", + 'GRD_HD': [ + 'C1214470533-ASF', + 'C1212201032-ASF', + 'C1234413229-ASFDEV', + 'C1327985645-ASF', + 'C1216244589-ASF', ], - "METADATA_GRD_HD": [ - "C1214470576-ASF", - "C1212209226-ASF", - "C1234413232-ASFDEV", - "C1327985741-ASF", - "C1216244601-ASF", + 'METADATA_GRD_HD': [ + 'C1214470576-ASF', + 'C1212209226-ASF', + 'C1234413232-ASFDEV', + 'C1327985741-ASF', + 'C1216244601-ASF', ], - "GUNW_STD": [ - "C1595422627-ASF", - "C1225776654-ASF", - "C1595422627-ASF", - "C1225776654-ASF", + 'GUNW_STD': [ + 'C1595422627-ASF', + 'C1225776654-ASF', + 'C1595422627-ASF', + 'C1225776654-ASF', ], - "METADATA_SLC": [ - "C1214470496-ASF", - "C1208117434-ASF", - "C1234413236-ASFDEV", - "C1327985617-ASF", - "C1216244585-ASF", - "C1234413254-ASFDEV", + 'METADATA_SLC': [ + 'C1214470496-ASF', + 'C1208117434-ASF', + 
'C1234413236-ASFDEV', + 'C1327985617-ASF', + 'C1216244585-ASF', + 'C1234413254-ASFDEV', ], - "METADATA_RAW": [ - "C1214470532-ASF", - "C1208115009-ASF", - "C1234413235-ASFDEV", - "C1327985650-ASF", - "C1216244595-ASF", + 'METADATA_RAW': [ + 'C1214470532-ASF', + 'C1208115009-ASF', + 'C1234413235-ASFDEV', + 'C1327985650-ASF', + 'C1216244595-ASF', ], - "OCN": [ - "C1214472977-ASF", - "C1212212560-ASF", - "C1234413237-ASFDEV", - "C1327985579-ASF", - "C1216244593-ASF", - "C1234413255-ASFDEV", + 'OCN': [ + 'C1214472977-ASF', + 'C1212212560-ASF', + 'C1234413237-ASFDEV', + 'C1327985579-ASF', + 'C1216244593-ASF', + 'C1234413255-ASFDEV', ], - "METADATA_GRD_MD": [ - "C1214472336-ASF", - "C1212212493-ASF", - "C1234413233-ASFDEV", - "C1327985578-ASF", - "C1216244591-ASF", + 'METADATA_GRD_MD': [ + 'C1214472336-ASF', + 'C1212212493-ASF', + 'C1234413233-ASFDEV', + 'C1327985578-ASF', + 'C1216244591-ASF', ], - "METADATA_OCN": [ - "C1266376001-ASF", - "C1215704763-ASF", - "C1234413234-ASFDEV", - "C1327985646-ASF", - "C1216244590-ASF", - "C1234413252-ASFDEV", + 'METADATA_OCN': [ + 'C1266376001-ASF', + 'C1215704763-ASF', + 'C1234413234-ASFDEV', + 'C1327985646-ASF', + 'C1216244590-ASF', + 'C1234413252-ASFDEV', ], - "GRD_MS": [ - "C1214472994-ASF", - "C1212158318-ASF", - "C1327985740-ASF", - "C1216244600-ASF", + 'GRD_MS': [ + 'C1214472994-ASF', + 'C1212158318-ASF', + 'C1327985740-ASF', + 'C1216244600-ASF', ], - "METADATA_GRD_HS": [ - "C1214470732-ASF", - "C1212158326-ASF", - "C1234413243-ASFDEV", - "C1327985619-ASF", - "C1216244587-ASF", + 'METADATA_GRD_HS': [ + 'C1214470732-ASF', + 'C1212158326-ASF', + 'C1234413243-ASFDEV', + 'C1327985619-ASF', + 'C1216244587-ASF', ], - "METADATA_GRD_MS": [ - "C1214473170-ASF", - "C1212233976-ASF", - "C1327985739-ASF", - "C1216244598-ASF", + 'METADATA_GRD_MS': [ + 'C1214473170-ASF', + 'C1212233976-ASF', + 'C1327985739-ASF', + 'C1216244598-ASF', ], - "RAW": [ - "C1214470561-ASF", - "C1205264459-ASF", - "C1234413238-ASFDEV", - "C1327985647-ASF", - "C1216244592-ASF", - "C1234413256-ASFDEV", + 'RAW': [ + 'C1214470561-ASF', + 'C1205264459-ASF', + 'C1234413238-ASFDEV', + 'C1327985647-ASF', + 'C1216244592-ASF', + 'C1234413256-ASFDEV', ], - "GRD_MD": [ - "C1214471521-ASF", - "C1212209035-ASF", - "C1234413230-ASFDEV", - "C1327985660-ASF", - "C1216244594-ASF", + 'GRD_MD': [ + 'C1214471521-ASF', + 'C1212209035-ASF', + 'C1234413230-ASFDEV', + 'C1327985660-ASF', + 'C1216244594-ASF', ], - "GRD_HS": [ - "C1214470682-ASF", - "C1212158327-ASF", - "C1234413240-ASFDEV", - "C1327985571-ASF", - "C1216244586-ASF", + 'GRD_HS': [ + 'C1214470682-ASF', + 'C1212158327-ASF', + 'C1234413240-ASFDEV', + 'C1327985571-ASF', + 'C1216244586-ASF', ], - "CSLC": [ - "C2777443834-ASF", - "C1260721945-ASF", - "C2803501758-ASF", - "C1259976861-ASF", + 'CSLC': [ + 'C2777443834-ASF', + 'C1260721945-ASF', + 'C2803501758-ASF', + 'C1259976861-ASF', ], - "RTC": [ - "C2777436413-ASF", - "C1260721853-ASF", - "C2803501097-ASF", - "C1259974840-ASF", + 'RTC': [ + 'C2777436413-ASF', + 'C1260721853-ASF', + 'C2803501097-ASF', + 'C1259974840-ASF', ], - "GRD_FD": ["C1214471197-ASF", "C1212200781-ASF"], - "METADATA_GRD_FD": ["C1214471960-ASF", "C1212209075-ASF"], - "BURST": [ - "C2709161906-ASF", - "C1257024016-ASF", - "C1257175154-ASFDEV", + 'GRD_FD': ['C1214471197-ASF', 'C1212200781-ASF'], + 'METADATA_GRD_FD': ['C1214471960-ASF', 'C1212209075-ASF'], + 'BURST': [ + 'C2709161906-ASF', + 'C1257024016-ASF', + 'C1257175154-ASFDEV', ], - "GUNW_AMP": [ - "C1596065640-ASF", - "C1225776655-ASF", - "C1596065640-ASF", - "C1225776655-ASF", + 
'GUNW_AMP': [ + 'C1596065640-ASF', + 'C1225776655-ASF', + 'C1596065640-ASF', + 'C1225776655-ASF', ], - "GUNW_COH": [ - "C1596065639-ASF", - "C1225776657-ASF", - "C1596065639-ASF", - "C1225776657-ASF", + 'GUNW_COH': [ + 'C1596065639-ASF', + 'C1225776657-ASF', + 'C1596065639-ASF', + 'C1225776657-ASF', ], - "GUNW_CON": [ - "C1596065641-ASF", - "C1225776658-ASF", - "C1596065641-ASF", - "C1225776658-ASF", + 'GUNW_CON': [ + 'C1596065641-ASF', + 'C1225776658-ASF', + 'C1596065641-ASF', + 'C1225776658-ASF', ], - "GUNW_UNW": [ - "C1595765183-ASF", - "C1225776659-ASF", - "C1595765183-ASF", - "C1225776659-ASF", + 'GUNW_UNW': [ + 'C1595765183-ASF', + 'C1225776659-ASF', + 'C1595765183-ASF', + 'C1225776659-ASF', ], - "CSLC-STATIC": ["C1259982010-ASF", "C2795135668-ASF"], - "RTC-STATIC": ["C1259981910-ASF", "C2795135174-ASF"], - "GRD": [ - "C1661710583-ASF", - "C1661710586-ASF", - "C1226557808-ASF", - "C1226557810-ASF", - "C1661710597-ASF", - "C1661710600-ASF", - "C1226557815-ASF", - "C1226557816-ASF", + 'CSLC-STATIC': ['C1259982010-ASF', 'C2795135668-ASF'], + 'RTC-STATIC': ['C1259981910-ASF', 'C2795135174-ASF'], + 'GRD': [ + 'C1661710583-ASF', + 'C1661710586-ASF', + 'C1226557808-ASF', + 'C1226557810-ASF', + 'C1661710597-ASF', + 'C1661710600-ASF', + 'C1226557815-ASF', + 'C1226557816-ASF', ], - "RTC_HI_RES": ["C1206487504-ASF", "C1207181535-ASF"], - "RTC_LOW_RES": ["C1206487217-ASF", "C1208013295-ASF"], - "L1.5": ["C1206485940-ASF", "C1205261223-ASF"], - "KMZ": [ - "C1206156901-ASF", - "C1207019609-ASF", - "C1214336554-ASF", - "C1214353859-ASF", - "C1212019993-ASF", - "C1210485039-ASF", + 'RTC_HI_RES': ['C1206487504-ASF', 'C1207181535-ASF'], + 'RTC_LOW_RES': ['C1206487217-ASF', 'C1208013295-ASF'], + 'L1.5': ['C1206485940-ASF', 'C1205261223-ASF'], + 'KMZ': [ + 'C1206156901-ASF', + 'C1207019609-ASF', + 'C1214336554-ASF', + 'C1214353859-ASF', + 'C1212019993-ASF', + 'C1210485039-ASF', ], - "L1.0": ["C1206485320-ASF"], - "L1.1": ["C1206485527-ASF", "C1207710476-ASF", "C1239611505-ASFDEV"], - "L2.2": ["C2011599335-ASF", "C1239927797-ASF", "C1238733834-ASFDEV"], - "L0": [ - "C1210197768-ASF", - "C1205261222-ASF", - "C1208794942-ASF", - "C1207143701-ASF", - "C1207933168-ASF", - "C1207175327-ASF", - "C1206897141-ASF", + 'L1.0': ['C1206485320-ASF'], + 'L1.1': ['C1206485527-ASF', 'C1207710476-ASF', 'C1239611505-ASFDEV'], + 'L2.2': ['C2011599335-ASF', 'C1239927797-ASF', 'C1238733834-ASFDEV'], + 'L0': [ + 'C1210197768-ASF', + 'C1205261222-ASF', + 'C1208794942-ASF', + 'C1207143701-ASF', + 'C1207933168-ASF', + 'C1207175327-ASF', + 'C1206897141-ASF', ], - "L1": [ - "C1211627521-ASF", - "C1205302527-ASF", - "C1209373626-ASF", - "C1207144966-ASF", - "C1208662092-ASF", - "C1207177736-ASF", - "C1206936391-ASF", - "C1205181982-ASF", - "C1206500991-ASF", - "C1206144699-ASF", + 'L1': [ + 'C1211627521-ASF', + 'C1205302527-ASF', + 'C1209373626-ASF', + 'C1207144966-ASF', + 'C1208662092-ASF', + 'C1207177736-ASF', + 'C1206936391-ASF', + 'C1205181982-ASF', + 'C1206500991-ASF', + 'C1206144699-ASF', ], - "3FP": ["C1213921661-ASF", "C1213928843-ASF", "C1205256880-ASF", "C1208713702-ASF"], - "JPG": ["C1213921626-ASF", "C1000000306-ASF"], - "CSTOKES": ["C1213927035-ASF", "C1208707768-ASF"], - "DEM": ["C179001730-ASF", "C1208655639-ASF"], - "CTIF": ["C1213925022-ASF", "C1208680681-ASF"], - "LTIF": ["C1213926419-ASF", "C1208691361-ASF"], - "PTIF": ["C1213926777-ASF", "C1208703384-ASF"], - "LSTOKES": ["C1213927939-ASF"], - "PSTOKES": ["C1213928209-ASF"], - "ATI": ["C1208652494-ASF"], - "GEOTIFF": ["C1206500826-ASF", "C1206752770-ASF"], - 
"L1A_Radar_RO_ISO_XML": [ - "C1243122884-ASF", - "C1243141638-ASF", - "C1243162394-ASF", - "C1233103964-ASF", - "C1213136752-ASF", - "C1213136799-ASF", + '3FP': ['C1213921661-ASF', 'C1213928843-ASF', 'C1205256880-ASF', 'C1208713702-ASF'], + 'JPG': ['C1213921626-ASF', 'C1000000306-ASF'], + 'CSTOKES': ['C1213927035-ASF', 'C1208707768-ASF'], + 'DEM': ['C179001730-ASF', 'C1208655639-ASF'], + 'CTIF': ['C1213925022-ASF', 'C1208680681-ASF'], + 'LTIF': ['C1213926419-ASF', 'C1208691361-ASF'], + 'PTIF': ['C1213926777-ASF', 'C1208703384-ASF'], + 'LSTOKES': ['C1213927939-ASF'], + 'PSTOKES': ['C1213928209-ASF'], + 'ATI': ['C1208652494-ASF'], + 'GEOTIFF': ['C1206500826-ASF', 'C1206752770-ASF'], + 'L1A_Radar_RO_ISO_XML': [ + 'C1243122884-ASF', + 'C1243141638-ASF', + 'C1243162394-ASF', + 'C1233103964-ASF', + 'C1213136752-ASF', + 'C1213136799-ASF', ], - "L1A_Radar_RO_QA": [ - "C1243124139-ASF", - "C1243168733-ASF", - "C1243168866-ASF", - "C1216074923-ASF", - "C1213136709-ASF", - "C1213136844-ASF", + 'L1A_Radar_RO_QA': [ + 'C1243124139-ASF', + 'C1243168733-ASF', + 'C1243168866-ASF', + 'C1216074923-ASF', + 'C1213136709-ASF', + 'C1213136844-ASF', ], - "L1A_Radar_HDF5": [ - "C1214473171-ASF", - "C1243149604-ASF", - "C1212243761-ASF", - "C1213091807-ASF", + 'L1A_Radar_HDF5': [ + 'C1214473171-ASF', + 'C1243149604-ASF', + 'C1212243761-ASF', + 'C1213091807-ASF', ], - "L1A_Radar_ISO_XML": [ - "C1214473426-ASF", - "C1243119801-ASF", - "C1212243437-ASF", - "C1213096699-ASF", + 'L1A_Radar_ISO_XML': [ + 'C1214473426-ASF', + 'C1243119801-ASF', + 'C1212243437-ASF', + 'C1213096699-ASF', ], - "L1A_Radar_QA": [ - "C1214473839-ASF", - "C1243133204-ASF", - "C1212249653-ASF", - "C1213101573-ASF", + 'L1A_Radar_QA': [ + 'C1214473839-ASF', + 'C1243133204-ASF', + 'C1212249653-ASF', + 'C1213101573-ASF', ], - "L1A_Radar_RO_HDF5": [ - "C1243197402-ASF", - "C1243215430-ASF", - "C1243124754-ASF", - "C1213136240-ASF", - "C1216074755-ASF", + 'L1A_Radar_RO_HDF5': [ + 'C1243197402-ASF', + 'C1243215430-ASF', + 'C1243124754-ASF', + 'C1213136240-ASF', + 'C1216074755-ASF', ], - "L1B_S0_LoRes_HDF5": [ - "C1214473308-ASF", - "C1243253631-ASF", - "C1243133445-ASF", - "C1212249811-ASF", - "C1213125007-ASF", - "C1216074919-ASF", + 'L1B_S0_LoRes_HDF5': [ + 'C1214473308-ASF', + 'C1243253631-ASF', + 'C1243133445-ASF', + 'C1212249811-ASF', + 'C1213125007-ASF', + 'C1216074919-ASF', ], - "L1B_S0_LoRes_ISO_XML": [ - "C1214473550-ASF", - "C1243197502-ASF", - "C1243126328-ASF", - "C1212196951-ASF", - "C1213115690-ASF", - "C1216074758-ASF", + 'L1B_S0_LoRes_ISO_XML': [ + 'C1214473550-ASF', + 'C1243197502-ASF', + 'C1243126328-ASF', + 'C1212196951-ASF', + 'C1213115690-ASF', + 'C1216074758-ASF', ], - "L1B_S0_LoRes_QA": [ - "C1214474243-ASF", - "C1243216659-ASF", - "C1243129847-ASF", - "C1212243666-ASF", - "C1213115896-ASF", - "C1216074761-ASF", + 'L1B_S0_LoRes_QA': [ + 'C1214474243-ASF', + 'C1243216659-ASF', + 'C1243129847-ASF', + 'C1212243666-ASF', + 'C1213115896-ASF', + 'C1216074761-ASF', ], - "L1C_S0_HiRes_HDF5": [ - "C1214473367-ASF", - "C1243268956-ASF", - "C1243144528-ASF", - "C1212250364-ASF", - "C1213134622-ASF", - "C1216074770-ASF", + 'L1C_S0_HiRes_HDF5': [ + 'C1214473367-ASF', + 'C1243268956-ASF', + 'C1243144528-ASF', + 'C1212250364-ASF', + 'C1213134622-ASF', + 'C1216074770-ASF', ], - "L1C_S0_HiRes_ISO_XML": [ - "C1214473624-ASF", - "C1243228612-ASF", - "C1243136142-ASF", - "C1212246173-ASF", - "C1213125156-ASF", - "C1216074764-ASF", + 'L1C_S0_HiRes_ISO_XML': [ + 'C1214473624-ASF', + 'C1243228612-ASF', + 'C1243136142-ASF', + 'C1212246173-ASF', + 
'C1213125156-ASF', + 'C1216074764-ASF', ], - "L1C_S0_HiRes_QA": [ - "C1214474435-ASF", - "C1243255360-ASF", - "C1243140611-ASF", - "C1212249773-ASF", - "C1213134486-ASF", - "C1233101609-ASF", + 'L1C_S0_HiRes_QA': [ + 'C1214474435-ASF', + 'C1243255360-ASF', + 'C1243140611-ASF', + 'C1212249773-ASF', + 'C1213134486-ASF', + 'C1233101609-ASF', ], - "METADATA": [ - "C1214353986-ASF", - "C1214336717-ASF", - "C1210487703-ASF", - "C1212030772-ASF", + 'METADATA': [ + 'C1214353986-ASF', + 'C1214336717-ASF', + 'C1210487703-ASF', + 'C1212030772-ASF', ], - "INTERFEROMETRY": ["C1214336045-ASF", "C1212001698-ASF"], - "AMPLITUDE": ["C1214335430-ASF", "C1206116665-ASF"], - "AMPLITUDE_GRD": ["C1214335471-ASF", "C1206132445-ASF"], - "DEM_TIFF": [ - "C1214335903-ASF", - "C1214353593-ASF", - "C1211962154-ASF", - "C1207638502-ASF", + 'INTERFEROMETRY': ['C1214336045-ASF', 'C1212001698-ASF'], + 'AMPLITUDE': ['C1214335430-ASF', 'C1206116665-ASF'], + 'AMPLITUDE_GRD': ['C1214335471-ASF', 'C1206132445-ASF'], + 'DEM_TIFF': [ + 'C1214335903-ASF', + 'C1214353593-ASF', + 'C1211962154-ASF', + 'C1207638502-ASF', ], - "INTERFEROMETRY_GRD": ["C1214336154-ASF", "C1212005594-ASF"], - "INC": ["C1214353754-ASF", "C1210025872-ASF"], - "PROJECTED": ["C1214337770-ASF", "C1207188317-ASF"], - "PROJECTED_ML3X3": ["C1214354144-ASF", "C1210546638-ASF"], - "PROJECTED_ML5X5": ["C1214354235-ASF", "C1206122195-ASF"], - "COMPLEX": ["C1214343609-ASF", "C1209970710-ASF"], - "PAULI": ["C1214354031-ASF", "C1207038647-ASF"], - "SLOPE": ["C1214408428-ASF", "C1210599503-ASF"], - "STOKES": ["C1214419355-ASF", "C1210599673-ASF"], + 'INTERFEROMETRY_GRD': ['C1214336154-ASF', 'C1212005594-ASF'], + 'INC': ['C1214353754-ASF', 'C1210025872-ASF'], + 'PROJECTED': ['C1214337770-ASF', 'C1207188317-ASF'], + 'PROJECTED_ML3X3': ['C1214354144-ASF', 'C1210546638-ASF'], + 'PROJECTED_ML5X5': ['C1214354235-ASF', 'C1206122195-ASF'], + 'COMPLEX': ['C1214343609-ASF', 'C1209970710-ASF'], + 'PAULI': ['C1214354031-ASF', 'C1207038647-ASF'], + 'SLOPE': ['C1214408428-ASF', 'C1210599503-ASF'], + 'STOKES': ['C1214419355-ASF', 'C1210599673-ASF'], } # Helper Methods diff --git a/asf_search/CMR/field_map.py b/asf_search/CMR/field_map.py index 2ca322f0..f8b8bb19 100644 --- a/asf_search/CMR/field_map.py +++ b/asf_search/CMR/field_map.py @@ -1,49 +1,50 @@ field_map = { # API parameter CMR keyword CMR format strings - 'absoluteOrbit': {'key': 'orbit_number', 'fmt': '{0}'}, - 'asfFrame': {'key': 'attribute[]', 'fmt': 'int,FRAME_NUMBER,{0}'}, - 'maxBaselinePerp': {'key': 'attribute[]', 'fmt': 'float,INSAR_BASELINE,,{0}'}, - 'minBaselinePerp': {'key': 'attribute[]', 'fmt': 'float,INSAR_BASELINE,{0},'}, - 'bbox': {'key': 'bounding_box', 'fmt': '{0}'}, - 'beamMode': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE,{0}'}, - 'beamSwath': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE_TYPE,{0}'}, - 'campaign': {'key': 'attribute[]', 'fmt': 'string,MISSION_NAME,{0}'}, - 'circle': {'key': 'circle', 'fmt': '{0}'}, - 'maxDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,,{0}'}, - 'minDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,{0},'}, - 'maxFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,,{0}'}, # noqa F401 - 'minFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,{0},'}, # noqa F401 - 'flightDirection': {'key': 'attribute[]', 'fmt': 'string,ASCENDING_DESCENDING,{0}'}, # noqa F401 - 'flightLine': {'key': 'attribute[]', 'fmt': 'string,FLIGHT_LINE,{0}'}, - 'frame': {'key': 'attribute[]', 'fmt': 'int,CENTER_ESA_FRAME,{0}'}, - 'granule_list': 
{'key': 'readable_granule_name[]', 'fmt': '{0}'}, - 'groupID': {'key': 'attribute[]', 'fmt': 'string,GROUP_ID,{0}'}, - 'insarStackId': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_ID,{0}'}, - 'linestring': {'key': 'line', 'fmt': '{0}'}, - 'lookDirection': {'key': 'attribute[]', 'fmt': 'string,LOOK_DIRECTION,{0}'}, - 'maxInsarStackSize': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_SIZE,,{0}'}, - 'minInsarStackSize': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_SIZE,{0},'}, - 'instrument': {'key': 'instrument[]', 'fmt': '{0}'}, - 'offNadirAngle': {'key': 'attribute[]', 'fmt': 'float,OFF_NADIR_ANGLE,{0}'}, - 'platform': {'key': 'platform[]', 'fmt': '{0}'}, - 'polarization': {'key': 'attribute[]', 'fmt': 'string,POLARIZATION,{0}'}, - 'point': {'key': 'point', 'fmt': '{0}'}, - 'polygon': {'key': 'polygon', 'fmt': '{0}'}, - 'processingDate': {'key': 'updated_since', 'fmt': '{0}'}, - 'processingLevel': {'key': 'attribute[]', 'fmt': 'string,PROCESSING_TYPE,{0}'}, - 'product_list': {'key': 'granule_ur[]', 'fmt': '{0}'}, - 'provider': {'key': 'provider', 'fmt': '{0}'}, - 'relativeOrbit': {'key': 'attribute[]', 'fmt': 'int,PATH_NUMBER,{0}'}, - 'temporal': {'key': 'temporal', 'fmt': '{0}'}, - 'collections': {'key': 'echo_collection_id[]', 'fmt': '{0}'}, - 'shortName': {'key': 'shortName', 'fmt': '{0}'}, - 'temporalBaselineDays': {'key': 'attribute[]', 'fmt': 'int,TEMPORAL_BASELINE_DAYS,{0}'}, # noqa F401 - + 'absoluteOrbit': {'key': 'orbit_number', 'fmt': '{0}'}, + 'asfFrame': {'key': 'attribute[]', 'fmt': 'int,FRAME_NUMBER,{0}'}, + 'maxBaselinePerp': {'key': 'attribute[]', 'fmt': 'float,INSAR_BASELINE,,{0}'}, + 'minBaselinePerp': {'key': 'attribute[]', 'fmt': 'float,INSAR_BASELINE,{0},'}, + 'bbox': {'key': 'bounding_box', 'fmt': '{0}'}, + 'beamMode': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE,{0}'}, + 'beamSwath': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE_TYPE,{0}'}, + 'campaign': {'key': 'attribute[]', 'fmt': 'string,MISSION_NAME,{0}'}, + 'circle': {'key': 'circle', 'fmt': '{0}'}, + 'maxDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,,{0}'}, + 'minDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,{0},'}, + 'maxFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,,{0}'}, # noqa F401 + 'minFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,{0},'}, # noqa F401 + 'flightDirection': {'key': 'attribute[]', 'fmt': 'string,ASCENDING_DESCENDING,{0}'}, # noqa F401 + 'flightLine': {'key': 'attribute[]', 'fmt': 'string,FLIGHT_LINE,{0}'}, + 'frame': {'key': 'attribute[]', 'fmt': 'int,CENTER_ESA_FRAME,{0}'}, + 'granule_list': {'key': 'readable_granule_name[]', 'fmt': '{0}'}, + 'groupID': {'key': 'attribute[]', 'fmt': 'string,GROUP_ID,{0}'}, + 'insarStackId': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_ID,{0}'}, + 'linestring': {'key': 'line', 'fmt': '{0}'}, + 'lookDirection': {'key': 'attribute[]', 'fmt': 'string,LOOK_DIRECTION,{0}'}, + 'maxInsarStackSize': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_SIZE,,{0}'}, + 'minInsarStackSize': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_SIZE,{0},'}, + 'instrument': {'key': 'instrument[]', 'fmt': '{0}'}, + 'offNadirAngle': {'key': 'attribute[]', 'fmt': 'float,OFF_NADIR_ANGLE,{0}'}, + 'platform': {'key': 'platform[]', 'fmt': '{0}'}, + 'polarization': {'key': 'attribute[]', 'fmt': 'string,POLARIZATION,{0}'}, + 'point': {'key': 'point', 'fmt': '{0}'}, + 'polygon': {'key': 'polygon', 'fmt': '{0}'}, + 'processingDate': {'key': 'updated_since', 'fmt': '{0}'}, + 'processingLevel': {'key': 
'attribute[]', 'fmt': 'string,PROCESSING_TYPE,{0}'}, + 'product_list': {'key': 'granule_ur[]', 'fmt': '{0}'}, + 'provider': {'key': 'provider', 'fmt': '{0}'}, + 'relativeOrbit': {'key': 'attribute[]', 'fmt': 'int,PATH_NUMBER,{0}'}, + 'temporal': {'key': 'temporal', 'fmt': '{0}'}, + 'collections': {'key': 'echo_collection_id[]', 'fmt': '{0}'}, + 'shortName': {'key': 'shortName', 'fmt': '{0}'}, + 'temporalBaselineDays': { + 'key': 'attribute[]', + 'fmt': 'int,TEMPORAL_BASELINE_DAYS,{0}', + }, # noqa F401 # SLC BURST fields - 'absoluteBurstID': {'key': 'attribute[]', 'fmt': 'int,BURST_ID_ABSOLUTE,{0}'}, - 'relativeBurstID': {'key': 'attribute[]', 'fmt': 'int,BURST_ID_RELATIVE,{0}'}, - 'fullBurstID': {'key': 'attribute[]', 'fmt': 'string,BURST_ID_FULL,{0}'}, - + 'absoluteBurstID': {'key': 'attribute[]', 'fmt': 'int,BURST_ID_ABSOLUTE,{0}'}, + 'relativeBurstID': {'key': 'attribute[]', 'fmt': 'int,BURST_ID_RELATIVE,{0}'}, + 'fullBurstID': {'key': 'attribute[]', 'fmt': 'string,BURST_ID_FULL,{0}'}, # OPERA-S1 field - 'operaBurstID': {'key': 'attribute[]', 'fmt': 'string,OPERA_BURST_ID,{0}'}, + 'operaBurstID': {'key': 'attribute[]', 'fmt': 'string,OPERA_BURST_ID,{0}'}, } diff --git a/asf_search/CMR/subquery.py b/asf_search/CMR/subquery.py index 09c0b2e1..fbb00b56 100644 --- a/asf_search/CMR/subquery.py +++ b/asf_search/CMR/subquery.py @@ -24,29 +24,27 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]: params = dict(opts) # Break out two big list offenders into manageable chunks - for chunked_key in ["granule_list", "product_list"]: + for chunked_key in ['granule_list', 'product_list']: if params.get(chunked_key) is not None: params[chunked_key] = chunk_list(params[chunked_key], CMR_PAGE_SIZE) list_param_names = [ - "platform", - "season", - "collections", - "dataset", - "cmr_keywords", - "shortName", - "circle", - "linestring", - "point", + 'platform', + 'season', + 'collections', + 'dataset', + 'cmr_keywords', + 'shortName', + 'circle', + 'linestring', + 'point', ] # these parameters will dodge the subquery system skip_param_names = [ - "maxResults", + 'maxResults', ] # these params exist in opts, but shouldn't be passed on to subqueries at ALL - collections, aliased_keywords = get_keyword_concept_ids( - params, opts.collectionAlias - ) - params["collections"] = list(union1d(collections, params.get("collections", []))) + collections, aliased_keywords = get_keyword_concept_ids(params, opts.collectionAlias) + params['collections'] = list(union1d(collections, params.get('collections', []))) for keyword in [*skip_param_names, *aliased_keywords]: params.pop(keyword, None) @@ -76,9 +74,9 @@ def _build_subquery( for p in query: q.update(p) - q["provider"] = opts.provider - q["host"] = opts.host - q["session"] = copy(opts.session) + q['provider'] = opts.provider + q['host'] = opts.host + q['session'] = copy(opts.session) return ASFSearchOptions(**q, **list_params) @@ -100,33 +98,31 @@ def get_keyword_concept_ids(params: dict, use_collection_alias: bool = True) -> aliased_keywords = [] if use_collection_alias: - if "processingLevel" in params.keys(): + if 'processingLevel' in params.keys(): collections = get_concept_id_alias( - params.get("processingLevel"), collections_by_processing_level + params.get('processingLevel'), collections_by_processing_level ) if len(collections): - aliased_keywords.append("processingLevel") + aliased_keywords.append('processingLevel') - if "platform" in params.keys(): + if 'platform' in params.keys(): platform_concept_ids = get_concept_id_alias( - 
[platform.upper() for platform in params.get("platform")],
+                [platform.upper() for platform in params.get('platform')],
                 collections_per_platform,
             )

             if len(platform_concept_ids):
-                aliased_keywords.append("platform")
+                aliased_keywords.append('platform')
                 collections = _get_intersection(platform_concept_ids, collections)

-        if "dataset" in params.keys():
-            aliased_keywords.append("dataset")
-            dataset_concept_ids = get_dataset_concept_ids(params.get("dataset"))
+        if 'dataset' in params.keys():
+            aliased_keywords.append('dataset')
+            dataset_concept_ids = get_dataset_concept_ids(params.get('dataset'))
             collections = _get_intersection(dataset_concept_ids, collections)

     return collections, aliased_keywords


-def _get_intersection(
-    keyword_concept_ids: List[str], intersecting_ids: List[str]
-) -> List[str]:
+def _get_intersection(keyword_concept_ids: List[str], intersecting_ids: List[str]) -> List[str]:
     """
     Returns the intersection between two lists. If the second list is empty
     the first list is returned unchanged
     """
@@ -175,7 +171,7 @@ def translate_param(param_name, param_val) -> List[dict]:
             formatted_val = unformatted_val

             if isinstance(unformatted_val, list):
-                formatted_val = ",".join([f"{t}" for t in unformatted_val])
+                formatted_val = ','.join([f'{t}' for t in unformatted_val])

             param_list.append({param_name: formatted_val})
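The subquery hunks above depend on chunk_list splitting oversized granule_list/product_list values into CMR-page-sized batches. Neither chunk_list nor the value of CMR_PAGE_SIZE appears in this patch, so the following is only a minimal sketch of the assumed behaviour:

    # Minimal sketch, not asf_search's actual chunk_list implementation
    # (which is not shown in this patch): split one long list into
    # page-sized batches so each subquery stays under the CMR page size.
    from typing import List

    def chunk_list(items: List[str], page_size: int) -> List[List[str]]:
        return [items[i:i + page_size] for i in range(0, len(items), page_size)]

    # Assuming an illustrative page size of 250, 600 granule names become
    # three subquery batches of 250, 250, and 100 granules:
    granules = ['S1A_GRANULE_%04d' % n for n in range(600)]
    assert [len(b) for b in chunk_list(granules, 250)] == [250, 250, 100]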
diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py
index 26e39198..d4b8359e 100644
--- a/asf_search/CMR/translate.py
+++ b/asf_search/CMR/translate.py
@@ -24,15 +24,15 @@ def translate_opts(opts: ASFSearchOptions) -> List:

     # Escape commas for each key in the list.
     # intersectsWith, temporal, and other keys you don't want to escape, so keep whitelist instead
-    for escape_commas in ["campaign"]:
+    for escape_commas in ['campaign']:
         if escape_commas in dict_opts:
-            dict_opts[escape_commas] = dict_opts[escape_commas].replace(",", "\\,")
+            dict_opts[escape_commas] = dict_opts[escape_commas].replace(',', '\\,')

     dict_opts = fix_cmr_shapes(dict_opts)

     # Special case to unravel WKT field a little for compatibility
-    if "intersectsWith" in dict_opts:
-        shape = wkt.loads(dict_opts.pop("intersectsWith", None))
+    if 'intersectsWith' in dict_opts:
+        shape = wkt.loads(dict_opts.pop('intersectsWith', None))

         # If a wide rectangle is provided, make sure to use the bounding box
         # instead of the wkt for better responses from CMR
@@ -48,14 +48,14 @@ def translate_opts(opts: ASFSearchOptions) -> List:
             bottom_left = [str(coord) for coord in bounds[:2]]
             top_right = [str(coord) for coord in bounds[2:]]

-            bbox = ",".join([*bottom_left, *top_right])
-            dict_opts["bbox"] = bbox
+            bbox = ','.join([*bottom_left, *top_right])
+            dict_opts['bbox'] = bbox
         else:
-            (shapeType, shape) = wkt_to_cmr_shape(shape).split(":")
+            (shapeType, shape) = wkt_to_cmr_shape(shape).split(':')
             dict_opts[shapeType] = shape

     # If you need to use the temporal key:
-    if any(key in dict_opts for key in ["start", "end", "season"]):
+    if any(key in dict_opts for key in ['start', 'end', 'season']):
         dict_opts = fix_date(dict_opts)

     dict_opts = fix_range_params(dict_opts)
@@ -64,7 +64,7 @@
     cmr_opts = []

     # user provided umm fields
-    custom_cmr_keywords = dict_opts.pop("cmr_keywords", [])
+    custom_cmr_keywords = dict_opts.pop('cmr_keywords', [])

     for key, val in dict_opts.items():
         # If it's "session" or something else CMR doesn't accept, don't send it:
         if key not in field_map:
             continue
         if isinstance(val, list):
             for x in val:
-                if key in ["granule_list", "product_list"]:
-                    for y in x.split(","):
+                if key in ['granule_list', 'product_list']:
+                    for y in x.split(','):
                         cmr_opts.append((key, y))
                 else:
                     if isinstance(x, tuple):
-                        cmr_opts.append((key, ",".join([str(t) for t in x])))
+                        cmr_opts.append((key, ','.join([str(t) for t in x])))
                     else:
                         cmr_opts.append((key, x))
         else:
             cmr_opts.append((key, val))

     # translate the above tuples to CMR key/values
     for i, opt in enumerate(cmr_opts):
-        cmr_opts[i] = field_map[opt[0]]["key"], field_map[opt[0]]["fmt"].format(opt[1])
+        cmr_opts[i] = field_map[opt[0]]['key'], field_map[opt[0]]['fmt'].format(opt[1])

     if should_use_asf_frame(cmr_opts):
         cmr_opts = use_asf_frame(cmr_opts)

     cmr_opts.extend(custom_cmr_keywords)

     additional_keys = [
-        ("page_size", CMR_PAGE_SIZE),
-        ("options[temporal][and]", "true"),
-        ("sort_key[]", "-end_date"),
-        ("sort_key[]", "granule_ur"),
-        ("options[platform][ignore_case]", "true"),
-        ("provider", opts.provider),
+        ('page_size', CMR_PAGE_SIZE),
+        ('options[temporal][and]', 'true'),
+        ('sort_key[]', '-end_date'),
+        ('sort_key[]', 'granule_ur'),
+        ('options[platform][ignore_case]', 'true'),
+        ('provider', opts.provider),
     ]

     cmr_opts.extend(additional_keys)
@@ -107,25 +107,23 @@

 def fix_cmr_shapes(fixed_params: Dict[str, Any]) -> Dict[str, Any]:
     """Fixes raw CMR lon lat coord shapes"""
-    for param in ["point", "linestring", "circle"]:
+    for param in ['point', 'linestring', 'circle']:
         if param in fixed_params:
-            fixed_params[param] = ",".join(map(str, fixed_params[param]))
+            fixed_params[param] = ','.join(map(str, fixed_params[param]))

     return fixed_params


 def should_use_asf_frame(cmr_opts):
-    asf_frame_platforms = ["SENTINEL-1A", "SENTINEL-1B", "ALOS"]
+    asf_frame_platforms = ['SENTINEL-1A', 'SENTINEL-1B', 'ALOS']

-    asf_frame_collections = get_concept_id_alias(
-        asf_frame_platforms, collections_per_platform
-    )
+    asf_frame_collections = get_concept_id_alias(asf_frame_platforms, collections_per_platform)

     return any(
         [
-            p[0] == "platform[]"
+            p[0] == 'platform[]'
             and p[1].upper() in asf_frame_platforms
-            or p[0] == "echo_collection_id[]"
+            or p[0] == 'echo_collection_id[]'
             and p[1] in asf_frame_collections
             for p in cmr_opts
         ]
     )
@@ -145,13 +143,13 @@ def use_asf_frame(cmr_opts):
         if not isinstance(p[1], str):
             continue

-        m = re.search(r"CENTER_ESA_FRAME", p[1])
+        m = re.search(r'CENTER_ESA_FRAME', p[1])
         if m is None:
             continue

-        logging.debug("Sentinel/ALOS subquery, using ASF frame instead of ESA frame")
+        logging.debug('Sentinel/ALOS subquery, using ASF frame instead of ESA frame')

-        cmr_opts[n] = (p[0], p[1].replace(",CENTER_ESA_FRAME,", ",FRAME_NUMBER,"))
+        cmr_opts[n] = (p[0], p[1].replace(',CENTER_ESA_FRAME,', ',FRAME_NUMBER,'))

     return cmr_opts
@@ -195,33 +193,29 @@ def try_parse_date(value: str) -> Optional[str]:
         date = date.replace(tzinfo=timezone.utc)

     # Turn all inputs into a consistent format:
-    return date.strftime("%Y-%m-%dT%H:%M:%SZ")
+    return date.strftime('%Y-%m-%dT%H:%M:%SZ')


 def fix_date(fixed_params: Dict[str, Any]):
-    if "start" in fixed_params or "end" in fixed_params or "season" in fixed_params:
-        fixed_params["start"] = (
-            fixed_params["start"] if "start" in fixed_params else "1978-01-01T00:00:00Z"
+    if 'start' in fixed_params or 'end' in fixed_params or 'season' in fixed_params:
+        fixed_params['start'] = (
+            fixed_params['start'] if 'start' in fixed_params else '1978-01-01T00:00:00Z'
         )
-        fixed_params["end"] = (
-            fixed_params["end"]
-            if "end" in fixed_params
-            else datetime.now(datetime.UTC).isoformat()
+        fixed_params['end'] = (
+            fixed_params['end'] if 'end' in fixed_params else datetime.now(timezone.utc).isoformat()
         )
-        fixed_params["season"] = (
-            ",".join(str(x) for x in fixed_params["season"])
-            if "season" in fixed_params
-            else ""
+        fixed_params['season'] = (
+            ','.join(str(x) for x in fixed_params['season']) if 'season' in fixed_params else ''
         )

-        fixed_params["temporal"] = (
+        fixed_params['temporal'] = (
             f'{fixed_params["start"]},{fixed_params["end"]},{fixed_params["season"]}'
         )

         # And a little cleanup
-        fixed_params.pop("start", None)
-        fixed_params.pop("end", None)
-        fixed_params.pop("season", None)
+        fixed_params.pop('start', None)
+        fixed_params.pop('end', None)
+        fixed_params.pop('season', None)

     return fixed_params
@@ -229,14 +223,14 @@ def fix_range_params(fixed_params: Dict[str, Any]) -> Dict[str, Any]:
     """Converts ranges to comma separated strings"""
     for param in [
-        "offNadirAngle",
-        "relativeOrbit",
-        "absoluteOrbit",
-        "frame",
-        "asfFrame",
+        'offNadirAngle',
+        'relativeOrbit',
+        'absoluteOrbit',
+        'frame',
+        'asfFrame',
     ]:
         if param in fixed_params.keys() and isinstance(fixed_params[param], list):
-            fixed_params[param] = ",".join([str(val) for val in fixed_params[param]])
+            fixed_params[param] = ','.join([str(val) for val in fixed_params[param]])

     return fixed_params
@@ -261,10 +255,10 @@ def should_use_bbox(shape: BaseGeometry):

 def wkt_to_cmr_shape(shape: BaseGeometry):
     # take note of the WKT type
-    if shape.geom_type not in ["Point", "LineString", "Polygon"]:
-        raise ValueError("Unsupported WKT: {0}.".format(shape.wkt))
+    if shape.geom_type not in ['Point', 'LineString', 'Polygon']:
+        raise ValueError('Unsupported WKT: {0}.'.format(shape.wkt))

-    if shape.geom_type == "Polygon":
+    if shape.geom_type == 'Polygon':
         coords = shape.exterior.coords
     else:  # type == Point | Linestring
         coords = shape.coords

     lon_lat_sequence = []
     for lon_lat in coords:
         lon_lat_sequence.extend(lon_lat)
     # Turn any "6e8" to a literal number. (As a string):
-    coords = ["{:.16f}".format(float(cord)) for cord in lon_lat_sequence]
-    return "{0}:{1}".format(shape.geom_type.lower(), ",".join(coords))
+    coords = ['{:.16f}'.format(float(cord)) for cord in lon_lat_sequence]
+    return '{0}:{1}'.format(shape.geom_type.lower(), ','.join(coords))
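For reference, the tuple translation loop in translate_opts above reduces to a field_map lookup plus a str.format call. A short sketch, reusing two entries quoted verbatim from the field_map.py hunk earlier in this patch (the parameter values themselves are invented):

    # Two entries copied from field_map.py above; the values are invented.
    field_map = {
        'asfFrame': {'key': 'attribute[]', 'fmt': 'int,FRAME_NUMBER,{0}'},
        'circle': {'key': 'circle', 'fmt': '{0}'},
    }

    cmr_opts = [('asfFrame', 300), ('circle', '-119.543,37.925,5000')]
    for i, opt in enumerate(cmr_opts):
        cmr_opts[i] = field_map[opt[0]]['key'], field_map[opt[0]]['fmt'].format(opt[1])

    # cmr_opts is now:
    # [('attribute[]', 'int,FRAME_NUMBER,300'), ('circle', '-119.543,37.925,5000')]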
fixed_params["end"] - if "end" in fixed_params - else datetime.now(datetime.UTC).isoformat() + fixed_params['end'] = ( + fixed_params['end'] if 'end' in fixed_params else datetime.now(datetime.UTC).isoformat() ) - fixed_params["season"] = ( - ",".join(str(x) for x in fixed_params["season"]) - if "season" in fixed_params - else "" + fixed_params['season'] = ( + ','.join(str(x) for x in fixed_params['season']) if 'season' in fixed_params else '' ) - fixed_params["temporal"] = ( + fixed_params['temporal'] = ( f'{fixed_params["start"]},{fixed_params["end"]},{fixed_params["season"]}' ) # And a little cleanup - fixed_params.pop("start", None) - fixed_params.pop("end", None) - fixed_params.pop("season", None) + fixed_params.pop('start', None) + fixed_params.pop('end', None) + fixed_params.pop('season', None) return fixed_params @@ -229,14 +223,14 @@ def fix_date(fixed_params: Dict[str, Any]): def fix_range_params(fixed_params: Dict[str, Any]) -> Dict[str, Any]: """Converts ranges to comma separated strings""" for param in [ - "offNadirAngle", - "relativeOrbit", - "absoluteOrbit", - "frame", - "asfFrame", + 'offNadirAngle', + 'relativeOrbit', + 'absoluteOrbit', + 'frame', + 'asfFrame', ]: if param in fixed_params.keys() and isinstance(fixed_params[param], list): - fixed_params[param] = ",".join([str(val) for val in fixed_params[param]]) + fixed_params[param] = ','.join([str(val) for val in fixed_params[param]]) return fixed_params @@ -261,10 +255,10 @@ def should_use_bbox(shape: BaseGeometry): def wkt_to_cmr_shape(shape: BaseGeometry): # take note of the WKT type - if shape.geom_type not in ["Point", "LineString", "Polygon"]: - raise ValueError("Unsupported WKT: {0}.".format(shape.wkt)) + if shape.geom_type not in ['Point', 'LineString', 'Polygon']: + raise ValueError('Unsupported WKT: {0}.'.format(shape.wkt)) - if shape.geom_type == "Polygon": + if shape.geom_type == 'Polygon': coords = shape.exterior.coords else: # type == Point | Linestring coords = shape.coords @@ -273,5 +267,5 @@ def wkt_to_cmr_shape(shape: BaseGeometry): for lon_lat in coords: lon_lat_sequence.extend(lon_lat) # Turn any "6e8" to a literal number. 
diff --git a/asf_search/Products/AIRSARProduct.py b/asf_search/Products/AIRSARProduct.py
index e89fa48b..c868c785 100644
--- a/asf_search/Products/AIRSARProduct.py
+++ b/asf_search/Products/AIRSARProduct.py
@@ -10,10 +10,13 @@ class AIRSARProduct(ASFProduct):

     _base_properties = {
         **ASFProduct._base_properties,
-        'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int},
-        'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
-        'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
-        'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
+        'frameNumber': {
+            'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0],
+            'cast': try_parse_int,
+        },
+        'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
+        'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
+        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
     }

     def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py
index 69a9f43e..0de75bcd 100644
--- a/asf_search/Products/ALOSProduct.py
+++ b/asf_search/Products/ALOSProduct.py
@@ -13,10 +13,22 @@ class ALOSProduct(ASFStackableProduct):

     _base_properties = {
         **ASFStackableProduct._base_properties,
-        'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int},
-        'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float},
-        'offNadirAngle': {'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0], 'cast': try_parse_float},
-        'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float},
+        'frameNumber': {
+            'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0],
+            'cast': try_parse_int,
+        },
+        'faradayRotation': {
+            'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0],
+            'cast': try_parse_float,
+        },
+        'offNadirAngle': {
+            'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0],
+            'cast': try_parse_float,
+        },
+        'bytes': {
+            'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0],
+            'cast': try_round_float,
+        },
         'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
         'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]},
     }
@@ -24,8 +36,8 @@ class ALOSProduct(ASFStackableProduct):
     def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
         super().__init__(args, session)

-        if self.properties.get("groupID") is None:
-            self.properties["groupID"] = self.properties["sceneName"]
+        if self.properties.get('groupID') is None:
+            self.properties['groupID'] = self.properties['sceneName']

     @staticmethod
     def get_default_baseline_product_type() -> Union[str, None]:
diff --git a/asf_search/Products/ARIAS1GUNWProduct.py b/asf_search/Products/ARIAS1GUNWProduct.py
index aa4899f4..46f63e1e 100644
--- a/asf_search/Products/ARIAS1GUNWProduct.py
+++ b/asf_search/Products/ARIAS1GUNWProduct.py @@ -16,29 +16,31 @@ class ARIAS1GUNWProduct(S1Product): _base_properties = { **S1Product._base_properties, - 'perpendicularBaseline': {'path': ['AdditionalAttributes', ('Name', 'PERPENDICULAR_BASELINE'), 'Values', 0], 'cast': try_parse_float}, + 'perpendicularBaseline': { + 'path': [ + 'AdditionalAttributes', + ('Name', 'PERPENDICULAR_BASELINE'), + 'Values', + 0, + ], + 'cast': try_parse_float, + }, 'orbit': {'path': ['OrbitCalculatedSpatialDomains']}, 'inputGranules': {'path': ['InputGranules']}, - 'ariaVersion': {'path': ['AdditionalAttributes', ('Name', 'VERSION'), 'Values', 0]} + 'ariaVersion': {'path': ['AdditionalAttributes', ('Name', 'VERSION'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - self.properties["orbit"] = [ - orbit["OrbitNumber"] for orbit in self.properties["orbit"] - ] + self.properties['orbit'] = [orbit['OrbitNumber'] for orbit in self.properties['orbit']] - urls = self.umm_get( - self.umm, "RelatedUrls", ("Type", [("USE SERVICE API", "URL")]), 0 - ) + urls = self.umm_get(self.umm, 'RelatedUrls', ('Type', [('USE SERVICE API', 'URL')]), 0) - self.properties["additionalUrls"] = [] + self.properties['additionalUrls'] = [] if urls is not None: - self.properties["url"] = urls[0] - self.properties["fileName"] = ( - self.properties["fileID"] + "." + urls[0].split(".")[-1] - ) - self.properties["additionalUrls"] = urls[1:] + self.properties['url'] = urls[0] + self.properties['fileName'] = self.properties['fileID'] + '.' + urls[0].split('.')[-1] + self.properties['additionalUrls'] = urls[1:] def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: """ @@ -61,15 +63,15 @@ def get_default_baseline_product_type() -> None: @staticmethod def _is_subclass(item: Dict) -> bool: - platform = ASFProduct.umm_get(item["umm"], "Platforms", 0, "ShortName") - if platform in ["SENTINEL-1A", "SENTINEL-1B"]: + platform = ASFProduct.umm_get(item['umm'], 'Platforms', 0, 'ShortName') + if platform in ['SENTINEL-1A', 'SENTINEL-1B']: asf_platform = ASFProduct.umm_get( - item["umm"], - "AdditionalAttributes", - ("Name", "ASF_PLATFORM"), - "Values", + item['umm'], + 'AdditionalAttributes', + ('Name', 'ASF_PLATFORM'), + 'Values', 0, ) - return "Sentinel-1 Interferogram" in asf_platform + return 'Sentinel-1 Interferogram' in asf_platform return False diff --git a/asf_search/Products/ERSProduct.py b/asf_search/Products/ERSProduct.py index 6c1d1689..49d3d9cf 100644 --- a/asf_search/Products/ERSProduct.py +++ b/asf_search/Products/ERSProduct.py @@ -15,7 +15,10 @@ class ERSProduct(ASFStackableProduct): _base_properties = { **ASFStackableProduct._base_properties, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]}, - 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, + 'bytes': { + 'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], + 'cast': try_round_float, + }, 'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, diff --git a/asf_search/Products/NISARProduct.py b/asf_search/Products/NISARProduct.py index e554f2df..7b1fea51 100644 --- a/asf_search/Products/NISARProduct.py +++ b/asf_search/Products/NISARProduct.py @@ -8,19 +8,20 @@ class 
NISARProduct(ASFStackableProduct):

     ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/
     """
+
     _base_properties = {
         **ASFStackableProduct._base_properties,
-        'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']}
+        'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']},
     }

     def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
         super().__init__(args, session)

-        self.properties["additionalUrls"] = self._get_additional_urls()
-        self.properties["s3Urls"] = self._get_s3_urls()
+        self.properties['additionalUrls'] = self._get_additional_urls()
+        self.properties['s3Urls'] = self._get_s3_urls()

-        if self.properties.get("groupID") is None:
-            self.properties["groupID"] = self.properties["sceneName"]
+        if self.properties.get('groupID') is None:
+            self.properties['groupID'] = self.properties['sceneName']

     @staticmethod
     def get_default_baseline_product_type() -> Union[str, None]:
@@ -44,7 +45,7 @@ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions:
     def get_sort_keys(self) -> Tuple[str, str]:
         keys = super().get_sort_keys()

-        if keys[0] == "":
-            return (self._read_property("processingDate", ""), keys[1])
+        if keys[0] == '':
+            return (self._read_property('processingDate', ''), keys[1])

         return keys
diff --git a/asf_search/Products/OPERAS1Product.py b/asf_search/Products/OPERAS1Product.py
index ee7c5372..94e29c47 100644
--- a/asf_search/Products/OPERAS1Product.py
+++ b/asf_search/Products/OPERAS1Product.py
@@ -10,54 +10,36 @@ class OPERAS1Product(S1Product):
     """

     _base_properties = {
-<<<<<<< HEAD
-        "centerLat": {"path": []},  # Opera products lacks these fields
-        "centerLon": {"path": []},
-        "frameNumber": {"path": []},
-        "operaBurstID": {
-            "path": ["AdditionalAttributes", ("Name", "OPERA_BURST_ID"), "Values", 0]
-        },
-        "validityStartDate": {
-            "path": ["TemporalExtent", "SingleDateTime"],
-            "cast": try_parse_date,
-        },
-        "bytes": {"path": ["DataGranule", "ArchiveAndDistributionInformation"]},
-        "subswath": {
-            "path": ["AdditionalAttributes", ("Name", "SUBSWATH_NAME"), "Values", 0]
-        },
-        "polarization": {
-            "path": ["AdditionalAttributes", ("Name", "POLARIZATION"), "Values"]
-        },  # dual polarization is in list rather than a 'VV+VH' style format
-=======
         **S1Product._base_properties,
-        'centerLat': {'path': []},  # Opera products lacks these fields
+        'centerLat': {'path': []},  # Opera products lack these fields
         'centerLon': {'path': []},
         'frameNumber': {'path': []},
         'operaBurstID': {'path': ['AdditionalAttributes', ('Name', 'OPERA_BURST_ID'), 'Values', 0]},
         'validityStartDate': {'path': ['TemporalExtent', 'SingleDateTime'], 'cast': try_parse_date},
         'bytes': {'path': ['DataGranule', 'ArchiveAndDistributionInformation']},
         'subswath': {'path': ['AdditionalAttributes', ('Name', 'SUBSWATH_NAME'), 'Values', 0]},
-        'polarization': {'path': ['AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values']}  # dual polarization is in list rather than a 'VV+VH' style format
->>>>>>> master
+        'polarization': {
+            'path': ['AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values']
+        },  # dual polarization is in a list rather than a 'VV+VH' style format
     }

     _subclass_concept_ids = {
-        "C1257995185-ASF",
-        "C1257995186-ASF",
-        "C1258354200-ASF",
-        "C1258354201-ASF",
-        "C1259974840-ASF",
-        "C1259976861-ASF",
-        "C1259981910-ASF",
-        "C1259982010-ASF",
-        "C2777436413-ASF",
-        "C2777443834-ASF",
-        "C2795135174-ASF",
-        "C2795135668-ASF",
-        "C1260721853-ASF",
-        "C1260721945-ASF",
-        "C2803501097-ASF",
-        "C2803501758-ASF",
+        'C1257995185-ASF',
+        'C1257995186-ASF',
+        'C1258354200-ASF',
+
'C1258354201-ASF', + 'C1259974840-ASF', + 'C1259976861-ASF', + 'C1259981910-ASF', + 'C1259982010-ASF', + 'C2777436413-ASF', + 'C2777443834-ASF', + 'C2795135174-ASF', + 'C2795135668-ASF', + 'C1260721853-ASF', + 'C1260721945-ASF', + 'C2803501097-ASF', + 'C2803501758-ASF', } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @@ -65,61 +47,54 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): self.baseline = None - self.properties["beamMode"] = self.umm_get( - self.umm, "AdditionalAttributes", ("Name", "BEAM_MODE"), "Values", 0 + self.properties['beamMode'] = self.umm_get( + self.umm, 'AdditionalAttributes', ('Name', 'BEAM_MODE'), 'Values', 0 ) - self.properties["additionalUrls"] = self._get_additional_urls() + self.properties['additionalUrls'] = self._get_additional_urls() - self.properties["operaBurstID"] = self.umm_get( - self.umm, "AdditionalAttributes", ("Name", "OPERA_BURST_ID"), "Values", 0 + self.properties['operaBurstID'] = self.umm_get( + self.umm, 'AdditionalAttributes', ('Name', 'OPERA_BURST_ID'), 'Values', 0 ) - self.properties["bytes"] = { - entry["Name"]: {"bytes": entry["SizeInBytes"], "format": entry["Format"]} - for entry in self.properties["bytes"] + self.properties['bytes'] = { + entry['Name']: {'bytes': entry['SizeInBytes'], 'format': entry['Format']} + for entry in self.properties['bytes'] } center = self.centroid() - self.properties["centerLat"] = center.y - self.properties["centerLon"] = center.x + self.properties['centerLat'] = center.y + self.properties['centerLon'] = center.x - self.properties.pop("frameNumber") + self.properties.pop('frameNumber') - if (processingLevel := self.properties["processingLevel"]) in [ - "RTC", - "RTC-STATIC", + if (processingLevel := self.properties['processingLevel']) in [ + 'RTC', + 'RTC-STATIC', ]: - self.properties["bistaticDelayCorrection"] = self.umm_get( + self.properties['bistaticDelayCorrection'] = self.umm_get( self.umm, - "AdditionalAttributes", - ("Name", "BISTATIC_DELAY_CORRECTION"), - "Values", + 'AdditionalAttributes', + ('Name', 'BISTATIC_DELAY_CORRECTION'), + 'Values', 0, ) - if processingLevel == "RTC": - self.properties["noiseCorrection"] = self.umm_get( + if processingLevel == 'RTC': + self.properties['noiseCorrection'] = self.umm_get( self.umm, - "AdditionalAttributes", - ("Name", "NOISE_CORRECTION"), - "Values", + 'AdditionalAttributes', + ('Name', 'NOISE_CORRECTION'), + 'Values', 0, ) - self.properties["postProcessingFilter"] = self.umm_get( + self.properties['postProcessingFilter'] = self.umm_get( self.umm, - "AdditionalAttributes", - ("Name", "POST_PROCESSING_FILTER"), - "Values", + 'AdditionalAttributes', + ('Name', 'POST_PROCESSING_FILTER'), + 'Values', 0, ) @staticmethod -<<<<<<< HEAD - def get_property_paths() -> Dict: - return {**S1Product.get_property_paths(), **OPERAS1Product._base_properties} - - @staticmethod -======= ->>>>>>> master def get_default_baseline_product_type() -> None: """ Returns the product type to search for when building a baseline stack. 
@@ -141,8 +116,8 @@ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: def get_sort_keys(self) -> Tuple[str, str]: keys = super().get_sort_keys() - if keys[0] == "": - return (self._read_property("validityStartDate", ""), keys[1]) + if keys[0] == '': + return (self._read_property('validityStartDate', ''), keys[1]) return keys @@ -150,5 +125,5 @@ def get_sort_keys(self) -> Tuple[str, str]: def _is_subclass(item: Dict) -> bool: # not all umm products have this field set, # but when it's available it's convenient for fast matching - concept_id = item["meta"].get("collection-concept-id") + concept_id = item['meta'].get('collection-concept-id') return concept_id in OPERAS1Product._subclass_concept_ids diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py index 5ea2889c..d25b005a 100644 --- a/asf_search/Products/RADARSATProduct.py +++ b/asf_search/Products/RADARSATProduct.py @@ -1,11 +1,6 @@ from typing import Dict, Union -<<<<<<< HEAD from asf_search import ASFSession, ASFStackableProduct -from asf_search.CMR.translate import try_parse_float -======= -from asf_search import ASFSearchOptions, ASFSession, ASFProduct, ASFStackableProduct from asf_search.CMR.translate import try_parse_float, try_parse_int ->>>>>>> master from asf_search.constants import PRODUCT_TYPE @@ -15,19 +10,6 @@ class RADARSATProduct(ASFStackableProduct): """ _base_properties = { -<<<<<<< HEAD - "faradayRotation": { - "path": ["AdditionalAttributes", ("Name", "FARADAY_ROTATION"), "Values", 0], - "cast": try_parse_float, - }, - "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, - "beamModeType": { - "path": ["AdditionalAttributes", ("Name", "BEAM_MODE_TYPE"), "Values", 0] - }, - "insarStackId": { - "path": ["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0] - }, -======= **ASFStackableProduct._base_properties, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, @@ -35,23 +17,12 @@ class RADARSATProduct(ASFStackableProduct): 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME) 'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, ->>>>>>> master } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) @staticmethod -<<<<<<< HEAD - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **RADARSATProduct._base_properties, - } - - @staticmethod -======= ->>>>>>> master def get_default_baseline_product_type() -> Union[str, None]: """ Returns the product type to search for when building a baseline stack. 
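All of the product classes in this series share the `_base_properties` pattern shown above: each entry pairs a UMM `path` to walk with an optional `cast` to apply. A minimal sketch of the idea against a made-up UMM fragment (the real traversal is `ASFProduct.umm_get`, which also handles the `('Name', ...)` tuple matching):

    umm = {  # made-up UMM fragment, not real product metadata
        'AdditionalAttributes': [
            {'Name': 'FARADAY_ROTATION', 'Values': ['2.53']},
            {'Name': 'MD5SUM', 'Values': ['0f4371e4bd04d4f5a9421c5cea437f1b']},
        ]
    }

    def lookup(record, name):
        # Walks a path like ['AdditionalAttributes', ('Name', name), 'Values', 0]
        for attr in record.get('AdditionalAttributes', []):
            if attr.get('Name') == name:
                return attr['Values'][0]
        return None

    value = lookup(umm, 'FARADAY_ROTATION')
    print(float(value) if value is not None else None)  # the 'cast' step -> 2.53
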
diff --git a/asf_search/Products/S1BurstProduct.py b/asf_search/Products/S1BurstProduct.py
index 73a17231..3bc8fb44 100644
--- a/asf_search/Products/S1BurstProduct.py
+++ b/asf_search/Products/S1BurstProduct.py
@@ -22,55 +22,6 @@ class S1BurstProduct(S1Product):
     """

     _base_properties = {
-<<<<<<< HEAD
-        "bytes": {
-            "path": ["AdditionalAttributes", ("Name", "BYTE_LENGTH"), "Values", 0]
-        },
-        "absoluteBurstID": {
-            "path": [
-                "AdditionalAttributes",
-                ("Name", "BURST_ID_ABSOLUTE"),
-                "Values",
-                0,
-            ],
-            "cast": try_parse_int,
-        },
-        "relativeBurstID": {
-            "path": [
-                "AdditionalAttributes",
-                ("Name", "BURST_ID_RELATIVE"),
-                "Values",
-                0,
-            ],
-            "cast": try_parse_int,
-        },
-        "fullBurstID": {
-            "path": ["AdditionalAttributes", ("Name", "BURST_ID_FULL"), "Values", 0]
-        },
-        "burstIndex": {
-            "path": ["AdditionalAttributes", ("Name", "BURST_INDEX"), "Values", 0],
-            "cast": try_parse_int,
-        },
-        "samplesPerBurst": {
-            "path": [
-                "AdditionalAttributes",
-                ("Name", "SAMPLES_PER_BURST"),
-                "Values",
-                0,
-            ],
-            "cast": try_parse_int,
-        },
-        "subswath": {
-            "path": ["AdditionalAttributes", ("Name", "SUBSWATH_NAME"), "Values", 0]
-        },
-        "azimuthTime": {
-            "path": ["AdditionalAttributes", ("Name", "AZIMUTH_TIME"), "Values", 0],
-            "cast": try_parse_date,
-        },
-        "azimuthAnxTime": {
-            "path": ["AdditionalAttributes", ("Name", "AZIMUTH_ANX_TIME"), "Values", 0]
-        },
-=======
         **S1Product._base_properties,
         'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]},
         'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int},
@@ -81,7 +32,6 @@ class S1BurstProduct(S1Product):
         'subswath': {'path': ['AdditionalAttributes', ('Name', 'SUBSWATH_NAME'), 'Values', 0]},
         'azimuthTime': {'path': ['AdditionalAttributes', ('Name', 'AZIMUTH_TIME'), 'Values', 0], 'cast': try_parse_date},
         'azimuthAnxTime': {'path': ['AdditionalAttributes', ('Name', 'AZIMUTH_ANX_TIME'), 'Values', 0]},
->>>>>>> master
     }

     def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
@@ -126,13 +76,6 @@ def get_stack_opts(self, opts: ASFSearchOptions = None):
             stack_opts.polarization = [self.properties["polarization"]]
         return stack_opts

-<<<<<<< HEAD
-    @staticmethod
-    def get_property_paths() -> Dict:
-        return {**S1Product.get_property_paths(), **S1BurstProduct._base_properties}
-
-=======
->>>>>>> master
     def _get_additional_filenames_and_urls(self, default_filename: str = None):
         # Burst XML filenames are just numbers, this makes it more identifiable
         if default_filename is None:
diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py
index bfa12732..27deb703 100644
--- a/asf_search/Products/S1Product.py
+++ b/asf_search/Products/S1Product.py
@@ -17,7 +17,10 @@ class S1Product(ASFStackableProduct):

     _base_properties = {
         **ASFStackableProduct._base_properties,
-        'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME)
+        'frameNumber': {
+            'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0],
+            'cast': try_parse_int,
+        },  # Sentinel and ALOS product alt for frameNumber (ESA_FRAME)
         'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
         'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
         'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']},
@@ -40,10 +43,7 @@ def
has_baseline(self) -> bool: baseline = self.get_baseline_calc_properties() - return ( - baseline is not None - and None not in baseline["stateVectors"]["positions"].values() - ) + return baseline is not None and None not in baseline['stateVectors']['positions'].values() def get_baseline_calc_properties(self) -> Dict: """ @@ -51,14 +51,12 @@ def get_baseline_calc_properties(self) -> Dict: """ ascendingNodeTime = self.umm_cast( self._parse_timestamp, - self.umm_get( - self.umm, "AdditionalAttributes", ("Name", "ASC_NODE_TIME"), "Values", 0 - ), + self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'ASC_NODE_TIME'), 'Values', 0), ) return { - "stateVectors": self.get_state_vectors(), - "ascendingNodeTime": ascendingNodeTime, + 'stateVectors': self.get_state_vectors(), + 'ascendingNodeTime': ascendingNodeTime, } def get_state_vectors(self) -> Dict: @@ -70,32 +68,32 @@ def get_state_vectors(self) -> Dict: velocities = {} sv_pre_position = self.umm_get( - self.umm, "AdditionalAttributes", ("Name", "SV_POSITION_PRE"), "Values", 0 + self.umm, 'AdditionalAttributes', ('Name', 'SV_POSITION_PRE'), 'Values', 0 ) sv_post_position = self.umm_get( - self.umm, "AdditionalAttributes", ("Name", "SV_POSITION_POST"), "Values", 0 + self.umm, 'AdditionalAttributes', ('Name', 'SV_POSITION_POST'), 'Values', 0 ) sv_pre_velocity = self.umm_get( - self.umm, "AdditionalAttributes", ("Name", "SV_VELOCITY_PRE"), "Values", 0 + self.umm, 'AdditionalAttributes', ('Name', 'SV_VELOCITY_PRE'), 'Values', 0 ) sv_post_velocity = self.umm_get( - self.umm, "AdditionalAttributes", ("Name", "SV_VELOCITY_POST"), "Values", 0 + self.umm, 'AdditionalAttributes', ('Name', 'SV_VELOCITY_POST'), 'Values', 0 ) - positions["prePosition"], positions["prePositionTime"] = self.umm_cast( + positions['prePosition'], positions['prePositionTime'] = self.umm_cast( self._parse_state_vector, sv_pre_position ) - positions["postPosition"], positions["postPositionTime"] = self.umm_cast( + positions['postPosition'], positions['postPositionTime'] = self.umm_cast( self._parse_state_vector, sv_post_position ) - velocities["preVelocity"], velocities["preVelocityTime"] = self.umm_cast( + velocities['preVelocity'], velocities['preVelocityTime'] = self.umm_cast( self._parse_state_vector, sv_pre_velocity ) - velocities["postVelocity"], velocities["postVelocityTime"] = self.umm_cast( + velocities['postVelocity'], velocities['postVelocityTime'] = self.umm_cast( self._parse_state_vector, sv_post_velocity ) - return {"positions": positions, "velocities": velocities} + return {'positions': positions, 'velocities': velocities} def _parse_timestamp(self, timestamp: str) -> Optional[str]: if timestamp is None: @@ -103,14 +101,12 @@ def _parse_timestamp(self, timestamp: str) -> Optional[str]: return try_parse_date(timestamp) - def _parse_state_vector( - self, state_vector: str - ) -> Tuple[Optional[List], Optional[str]]: + def _parse_state_vector(self, state_vector: str) -> Tuple[Optional[List], Optional[str]]: if state_vector is None: return None, None - velocity = [float(val) for val in state_vector.split(",")[:3]] - timestamp = self._parse_timestamp(state_vector.split(",")[-1]) + velocity = [float(val) for val in state_vector.split(',')[:3]] + timestamp = self._parse_timestamp(state_vector.split(',')[-1]) return velocity, timestamp @@ -125,25 +121,25 @@ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: stack_opts = ASFSearchOptions() if opts is None else copy(opts) stack_opts.processingLevel = self.get_default_baseline_product_type() - 
stack_opts.beamMode = [self.properties["beamModeType"]]
-        stack_opts.flightDirection = self.properties["flightDirection"]
-        stack_opts.relativeOrbit = [int(self.properties["pathNumber"])]  # path
+        stack_opts.beamMode = [self.properties['beamModeType']]
+        stack_opts.flightDirection = self.properties['flightDirection']
+        stack_opts.relativeOrbit = [int(self.properties['pathNumber'])]  # path
         stack_opts.platform = [PLATFORM.SENTINEL1A, PLATFORM.SENTINEL1B]

-        if self.properties["polarization"] in ["HH", "HH+HV"]:
-            stack_opts.polarization = ["HH", "HH+HV"]
+        if self.properties['polarization'] in ['HH', 'HH+HV']:
+            stack_opts.polarization = ['HH', 'HH+HV']
         else:
-            stack_opts.polarization = ["VV", "VV+VH"]
+            stack_opts.polarization = ['VV', 'VV+VH']

         stack_opts.intersectsWith = self.centroid().wkt

         return stack_opts

     def is_valid_reference(self) -> bool:
-        keys = ["postPosition", "postPositionTime", "prePosition", "postPositionTime"]
+        keys = ['postPosition', 'postPositionTime', 'prePosition', 'prePositionTime']

         for key in keys:
-            if self.baseline["stateVectors"]["positions"].get(key) is None:
+            if self.baseline['stateVectors']['positions'].get(key) is None:
                 return False

         return True
diff --git a/asf_search/Products/SEASATProduct.py b/asf_search/Products/SEASATProduct.py
index bbb7064a..aeecca61 100644
--- a/asf_search/Products/SEASATProduct.py
+++ b/asf_search/Products/SEASATProduct.py
@@ -9,29 +9,14 @@ class SEASATProduct(ASFProduct):
     """

     _base_properties = {
-<<<<<<< HEAD
-        "bytes": {
-            "path": ["AdditionalAttributes", ("Name", "BYTES"), "Values", 0],
-            "cast": try_round_float,
-        },
-        "insarStackId": {
-            "path": ["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0]
-        },
-        "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]},
-=======
         **ASFProduct._base_properties,
-        'bytes': {'path': [ 'AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float},
-        'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
-        'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
->>>>>>> master
+        'bytes': {
+            'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0],
+            'cast': try_round_float,
+        },
+        'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
+        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
     }

     def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
         super().__init__(args, session)
-<<<<<<< HEAD
-
-    @staticmethod
-    def get_property_paths() -> Dict:
-        return {**ASFProduct.get_property_paths(), **SEASATProduct._base_properties}
-=======
->>>>>>> master
diff --git a/asf_search/Products/SIRCProduct.py b/asf_search/Products/SIRCProduct.py
index 5c7cde32..985fae4b 100644
--- a/asf_search/Products/SIRCProduct.py
+++ b/asf_search/Products/SIRCProduct.py
@@ -8,30 +8,12 @@ class SIRCProduct(ASFProduct):
     """

     _base_properties = {
-<<<<<<< HEAD
-        "groupID": {
-            "path": ["AdditionalAttributes", ("Name", "GROUP_ID"), "Values", 0]
-        },
-        "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]},
-        "pgeVersion": {"path": ["PGEVersionClass", "PGEVersion"]},
-        "beamModeType": {
-            "path": ["AdditionalAttributes", ("Name", "BEAM_MODE_TYPE"), "Values", 0]
-        },
-=======
         **ASFProduct._base_properties,
-        'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
-        'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
-        'pgeVersion': {'path':
['PGEVersionClass', 'PGEVersion'] }, + 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, + 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, + 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']}, 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, ->>>>>>> master } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) -<<<<<<< HEAD - - @staticmethod - def get_property_paths() -> Dict: - return {**ASFProduct.get_property_paths(), **SIRCProduct._base_properties} -======= ->>>>>>> master diff --git a/asf_search/Products/SMAPProduct.py b/asf_search/Products/SMAPProduct.py index ba852562..0e9ba728 100644 --- a/asf_search/Products/SMAPProduct.py +++ b/asf_search/Products/SMAPProduct.py @@ -9,28 +9,11 @@ class SMAPProduct(ASFProduct): """ _base_properties = { -<<<<<<< HEAD - "groupID": { - "path": ["AdditionalAttributes", ("Name", "GROUP_ID"), "Values", 0] - }, - "insarStackId": { - "path": ["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0] - }, - "md5sum": {"path": ["AdditionalAttributes", ("Name", "MD5SUM"), "Values", 0]}, -======= **ASFProduct._base_properties, - 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, - 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, - 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, ->>>>>>> master + 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, + 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, + 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) -<<<<<<< HEAD - - @staticmethod - def get_property_paths() -> Dict: - return {**ASFProduct.get_property_paths(), **SMAPProduct._base_properties} -======= ->>>>>>> master diff --git a/asf_search/Products/UAVSARProduct.py b/asf_search/Products/UAVSARProduct.py index 1e9133b1..e1f4cb97 100644 --- a/asf_search/Products/UAVSARProduct.py +++ b/asf_search/Products/UAVSARProduct.py @@ -9,9 +9,9 @@ class UAVSARProduct(ASFProduct): _base_properties = { **ASFProduct._base_properties, - 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, - 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, - 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, + 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, + 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, + 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): diff --git a/asf_search/WKT/RepairEntry.py b/asf_search/WKT/RepairEntry.py index 8ce05eff..0a981937 100644 --- a/asf_search/WKT/RepairEntry.py +++ b/asf_search/WKT/RepairEntry.py @@ -4,4 +4,4 @@ def __init__(self, report_type: str, report: str) -> None: self.report = report def __str__(self) -> str: - return f"{self.report_type}: {self.report}" + return f'{self.report_type}: {self.report}' diff --git a/asf_search/WKT/validate_wkt.py b/asf_search/WKT/validate_wkt.py index 5f3adc1d..dd7e1449 100644 --- a/asf_search/WKT/validate_wkt.py +++ b/asf_search/WKT/validate_wkt.py @@ -39,9 
+39,7 @@ def validate_wkt( f'WKT string: "{aoi_shape.wkt}" is a self intersecting polygon' ) - raise ASFWKTError( - f'WKT string: "{aoi_shape.wkt}" is not a valid WKT string' - ) + raise ASFWKTError(f'WKT string: "{aoi_shape.wkt}" is not a valid WKT string') if aoi_shape.is_empty: raise ASFWKTError(f'WKT string: "{aoi_shape.wkt}" empty WKT is not a valid AOI') @@ -90,7 +88,7 @@ def _simplify_geometry( RepairEntry( report_type="'type': 'EXTRA_DIMENSION'", report="'report': Only 2-Dimensional area of interests are supported (lon/lat), " - "higher dimension coordinates will be ignored", + 'higher dimension coordinates will be ignored', ) if geometry.has_z else None @@ -109,11 +107,9 @@ def _simplify_geometry( ] for report in repair_reports: if report is not None: - logging.info(f"{report}") + logging.info(f'{report}') - validated_wrapped = transform( - lambda x, y, z=None: tuple([round(x, 14), round(y, 14)]), wrapped - ) + validated_wrapped = transform(lambda x, y, z=None: tuple([round(x, 14), round(y, 14)]), wrapped) validated_unwrapped = transform( lambda x, y, z=None: tuple([round(x, 14), round(y, 14)]), unwrapped ) @@ -168,9 +164,7 @@ def _merge_overlapping_geometry( if isinstance(merged, BaseMultipartGeometry): unique_shapes = len(merged.geoms) merged = orient( - unary_union( - GeometryCollection([geom.convex_hull for geom in merged.geoms]) - ) + unary_union(GeometryCollection([geom.convex_hull for geom in merged.geoms])) ) if isinstance(merged, BaseMultipartGeometry): if unique_shapes != len(merged.geoms): @@ -182,7 +176,7 @@ def _merge_overlapping_geometry( else: merge_report = RepairEntry( "'type': 'OVERLAP_MERGE'", - f"'report': {unique_shapes} non-overlapping shapes merged by their convex-hulls", # noqa F401 + f"'report': {unique_shapes} non-overlapping shapes merged by their convex-hulls", # noqa F401 ) else: merge_report = RepairEntry( @@ -201,9 +195,7 @@ def _counter_clockwise_reorientation(geometry: Union[Point, LineString, Polygon] Ensures the geometry coordinates are wound counter-clockwise output: counter-clockwise oriented geometry """ - reoriented_report = RepairEntry( - "'type': 'REVERSE'", "'report': Reversed polygon winding order" - ) + reoriented_report = RepairEntry("'type': 'REVERSE'", "'report': Reversed polygon winding order") reoriented = orient(geometry) if isinstance(geometry, Polygon): @@ -284,16 +276,16 @@ def _get_convex_hull(geometry: BaseGeometry) -> Tuple[BaseGeometry, RepairEntry] output: convex hull of multi-part geometry, or the original single-shaped geometry """ if geometry.geom_type not in [ - "MultiPoint", - "MultiLineString", - "MultiPolygon", - "GeometryCollection", + 'MultiPoint', + 'MultiLineString', + 'MultiPolygon', + 'GeometryCollection', ]: return geometry, None possible_repair = RepairEntry( "'type': 'CONVEX_HULL_INDIVIDUAL'", - "'report': 'Unconnected shapes: Convex-hulled each INDIVIDUAL shape to merge them together.'", # noqa F401 + "'report': 'Unconnected shapes: Convex-hulled each INDIVIDUAL shape to merge them together.'", # noqa F401 ) return geometry.convex_hull, possible_repair @@ -313,7 +305,7 @@ def _simplify_aoi( """ repairs = [] - if shape.geom_type == "Point": + if shape.geom_type == 'Point': return shape, repairs # Check for very small shapes and collapse accordingly @@ -325,13 +317,13 @@ def _simplify_aoi( repair = RepairEntry( "'type': 'GEOMETRY_SIMPLIFICATION'", "'report': 'Shape Collapsed to Point: " - f"shape of {_get_shape_coords_len(shape)} " - f"simplified to {_get_shape_coords_len(simplified)} " + f'shape of 
{_get_shape_coords_len(shape)} ' + f'simplified to {_get_shape_coords_len(simplified)} ' f"with proximity threshold of {threshold}'", ) return simplified, [*repairs, repair] # If it's a single line segment, it's already as simple as can be. Don't do anything - elif shape.geom_type == "LineString" and len(shape.coords) == 2: + elif shape.geom_type == 'LineString' and len(shape.coords) == 2: return shape, repairs # Else, check if it's slim enough to become a linestring: elif mbr_width <= threshold: @@ -340,7 +332,7 @@ def _simplify_aoi( repair = RepairEntry( "'type': 'GEOMETRY_SIMPLIFICATION'", f"'report': 'Shape Collapsed to Vertical Line: shape of {_get_shape_coords_len(shape)} " - f"simplified to {_get_shape_coords_len(simplified)} " + f'simplified to {_get_shape_coords_len(simplified)} ' f"with proximity threshold of {threshold}'", ) return simplified, [*repairs, repair] @@ -350,7 +342,7 @@ def _simplify_aoi( repair = RepairEntry( "'type': 'GEOMETRY_SIMPLIFICATION'", "'report': 'Shape Collapsed to Horizontal Line: " - f"shape of {_get_shape_coords_len(shape)} simplified " + f'shape of {_get_shape_coords_len(shape)} simplified ' f"to {_get_shape_coords_len(simplified)} with proximity threshold of {threshold}'", ) return simplified, [*repairs, repair] @@ -372,7 +364,7 @@ def _simplify_aoi( if coords_length <= 300: return simplifed, repairs - raise ASFWKTError(f"Failed to simplify wkt string: {shape.wkt}") + raise ASFWKTError(f'Failed to simplify wkt string: {shape.wkt}') def _clamp(num): @@ -386,13 +378,13 @@ def _get_shape_coords_len(geometry: BaseGeometry): def _get_shape_coords(geometry: BaseGeometry): """Returns flattened coordinates of input Shapely geometry""" - if geometry.geom_type == "Polygon": + if geometry.geom_type == 'Polygon': return list(geometry.exterior.coords[:-1]) - if geometry.geom_type == "LineString": + if geometry.geom_type == 'LineString': return list(geometry.coords) - if geometry.geom_type == "Point": + if geometry.geom_type == 'Point': return list(geometry.coords) output = [] diff --git a/asf_search/__init__.py b/asf_search/__init__.py index 3367e5de..f552fe7a 100644 --- a/asf_search/__init__.py +++ b/asf_search/__init__.py @@ -24,7 +24,6 @@ "Install with 'python3 -m pip install -e .' to use" ) from e -<<<<<<< HEAD ASF_LOGGER = logging.getLogger(__name__) # Add null handle so we do nothing by default. It's up to whatever # imports us, if they want logging. 
@@ -54,24 +53,6 @@ from .baseline import * # noqa: F403 F401 E402 from .WKT import validate_wkt # noqa: F401 E402 from .export import * # noqa: F403 F401 E402 -======= -from .ASFSession import ASFSession -from .ASFProduct import ASFProduct -from .ASFStackableProduct import ASFStackableProduct -from .ASFSearchResults import ASFSearchResults -from .ASFSearchOptions import ASFSearchOptions, validators, validator_map -from .Products import * -from .exceptions import * -from .constants import BEAMMODE, FLIGHT_DIRECTION, INSTRUMENT, PLATFORM, POLARIZATION, PRODUCT_TYPE, INTERNAL, DATASET -from .exceptions import * -from .health import * -from .search import * -from .download import * -from .CMR import * -from .baseline import * -from .WKT import validate_wkt -from .export import * ->>>>>>> master REPORT_ERRORS = True """Enables automatic search error reporting to ASF, send any questions to uso@asf.alaska.edu""" diff --git a/asf_search/baseline/calc.py b/asf_search/baseline/calc.py index cdf7941c..33d2654b 100644 --- a/asf_search/baseline/calc.py +++ b/asf_search/baseline/calc.py @@ -4,14 +4,10 @@ import numpy as np -<<<<<<< HEAD -from asf_search import ASFProduct -======= try: from ciso8601 import parse_datetime except ImportError: from dateutil.parser import parse as parse_datetime ->>>>>>> master # WGS84 constants a = 6378137 diff --git a/asf_search/baseline/stack.py b/asf_search/baseline/stack.py index 26bdb6fa..c1ef9eec 100644 --- a/asf_search/baseline/stack.py +++ b/asf_search/baseline/stack.py @@ -1,13 +1,5 @@ -<<<<<<< HEAD -from typing import Tuple, List, Union -from ciso8601 import parse_datetime -import pytz - -from .calc import calculate_perpendicular_baselines -======= ->>>>>>> master from asf_search import ASFProduct, ASFStackableProduct, ASFSearchResults -from typing import Tuple, List +from typing import Tuple, List, Union import pytz from .calc import calculate_perpendicular_baselines diff --git a/asf_search/constants/BEAMMODE.py b/asf_search/constants/BEAMMODE.py index 0201200c..203efce9 100644 --- a/asf_search/constants/BEAMMODE.py +++ b/asf_search/constants/BEAMMODE.py @@ -1,47 +1,47 @@ -IW = "IW" -EW = "EW" -S1 = "S1" -S2 = "S2" -S3 = "S3" -S4 = "S4" -S5 = "S5" -S6 = "S6" -WV = "WV" -DSN = "DSN" -FBS = "FBS" -FBD = "FBD" -PLR = "PLR" -WB1 = "WB1" -WB2 = "WB2" -OBS = "OBS" -SIRC11 = "11" -SIRC13 = "13" -SIRC16 = "16" -SIRC20 = "20" -SLC = "SLC" -STD = "STD" -POL = "POL" -RPI = "RPI" -EH3 = "EH3" -EH4 = "EH4" -EH6 = "EH6" -EL1 = "EL1" -FN1 = "FN1" -FN2 = "FN2" -FN3 = "FN3" -FN4 = "FN4" -FN5 = "FN5" -SNA = "SNA" -SNB = "SNB" -ST1 = "ST1" -ST2 = "ST2" -ST3 = "ST3" -ST4 = "ST4" -ST5 = "ST5" -ST6 = "ST6" -ST7 = "ST7" -SWA = "SWA" -SWB = "SWB" -WD1 = "WD1" -WD2 = "WD2" -WD3 = "WD3" +IW = 'IW' +EW = 'EW' +S1 = 'S1' +S2 = 'S2' +S3 = 'S3' +S4 = 'S4' +S5 = 'S5' +S6 = 'S6' +WV = 'WV' +DSN = 'DSN' +FBS = 'FBS' +FBD = 'FBD' +PLR = 'PLR' +WB1 = 'WB1' +WB2 = 'WB2' +OBS = 'OBS' +SIRC11 = '11' +SIRC13 = '13' +SIRC16 = '16' +SIRC20 = '20' +SLC = 'SLC' +STD = 'STD' +POL = 'POL' +RPI = 'RPI' +EH3 = 'EH3' +EH4 = 'EH4' +EH6 = 'EH6' +EL1 = 'EL1' +FN1 = 'FN1' +FN2 = 'FN2' +FN3 = 'FN3' +FN4 = 'FN4' +FN5 = 'FN5' +SNA = 'SNA' +SNB = 'SNB' +ST1 = 'ST1' +ST2 = 'ST2' +ST3 = 'ST3' +ST4 = 'ST4' +ST5 = 'ST5' +ST6 = 'ST6' +ST7 = 'ST7' +SWA = 'SWA' +SWB = 'SWB' +WD1 = 'WD1' +WD2 = 'WD2' +WD3 = 'WD3' diff --git a/asf_search/constants/DATASET.py b/asf_search/constants/DATASET.py index 2b894b1d..fb705b95 100644 --- a/asf_search/constants/DATASET.py +++ b/asf_search/constants/DATASET.py @@ -1,16 +1,16 @@ -SENTINEL1 
= "SENTINEL-1" -OPERA_S1 = "OPERA-S1" -OPERA_S1_CALVAL = "OPERA-S1-CALVAL" -SLC_BURST = "SLC-BURST" -ALOS_PALSAR = "ALOS PALSAR" -ALOS_AVNIR_2 = "ALOS AVNIR-2" -SIRC = "SIR-C" -ARIA_S1_GUNW = "ARIA S1 GUNW" -SMAP = "SMAP" -UAVSAR = "UAVSAR" -RADARSAT_1 = "RADARSAT-1" -ERS = "ERS" -JERS_1 = "JERS-1" -AIRSAR = "AIRSAR" -SEASAT = "SEASAT" -NISAR = "NISAR" +SENTINEL1 = 'SENTINEL-1' +OPERA_S1 = 'OPERA-S1' +OPERA_S1_CALVAL = 'OPERA-S1-CALVAL' +SLC_BURST = 'SLC-BURST' +ALOS_PALSAR = 'ALOS PALSAR' +ALOS_AVNIR_2 = 'ALOS AVNIR-2' +SIRC = 'SIR-C' +ARIA_S1_GUNW = 'ARIA S1 GUNW' +SMAP = 'SMAP' +UAVSAR = 'UAVSAR' +RADARSAT_1 = 'RADARSAT-1' +ERS = 'ERS' +JERS_1 = 'JERS-1' +AIRSAR = 'AIRSAR' +SEASAT = 'SEASAT' +NISAR = 'NISAR' diff --git a/asf_search/constants/FLIGHT_DIRECTION.py b/asf_search/constants/FLIGHT_DIRECTION.py index a1ac154b..c4e942e0 100644 --- a/asf_search/constants/FLIGHT_DIRECTION.py +++ b/asf_search/constants/FLIGHT_DIRECTION.py @@ -1,2 +1,2 @@ -ASCENDING = "ASCENDING" -DESCENDING = "DESCENDING" +ASCENDING = 'ASCENDING' +DESCENDING = 'DESCENDING' diff --git a/asf_search/constants/INSTRUMENT.py b/asf_search/constants/INSTRUMENT.py index 437d9a41..efd19451 100644 --- a/asf_search/constants/INSTRUMENT.py +++ b/asf_search/constants/INSTRUMENT.py @@ -1,3 +1,3 @@ -C_SAR = "C-SAR" -PALSAR = "PALSAR" -AVNIR_2 = "AVNIR-2" +C_SAR = 'C-SAR' +PALSAR = 'PALSAR' +AVNIR_2 = 'AVNIR-2' diff --git a/asf_search/constants/PLATFORM.py b/asf_search/constants/PLATFORM.py index 01a40fb5..fab0e644 100644 --- a/asf_search/constants/PLATFORM.py +++ b/asf_search/constants/PLATFORM.py @@ -1,15 +1,15 @@ -SENTINEL1 = "SENTINEL-1" -SENTINEL1A = "Sentinel-1A" -SENTINEL1B = "Sentinel-1B" -SIRC = "SIR-C" -ALOS = "ALOS" -ERS = "ERS" -ERS1 = "ERS-1" -ERS2 = "ERS-2" -JERS = "JERS-1" -RADARSAT = "RADARSAT-1" -AIRSAR = "AIRSAR" -SEASAT = "SEASAT 1" -SMAP = "SMAP" -UAVSAR = "UAVSAR" -NISAR = "NISAR" +SENTINEL1 = 'SENTINEL-1' +SENTINEL1A = 'Sentinel-1A' +SENTINEL1B = 'Sentinel-1B' +SIRC = 'SIR-C' +ALOS = 'ALOS' +ERS = 'ERS' +ERS1 = 'ERS-1' +ERS2 = 'ERS-2' +JERS = 'JERS-1' +RADARSAT = 'RADARSAT-1' +AIRSAR = 'AIRSAR' +SEASAT = 'SEASAT 1' +SMAP = 'SMAP' +UAVSAR = 'UAVSAR' +NISAR = 'NISAR' diff --git a/asf_search/constants/POLARIZATION.py b/asf_search/constants/POLARIZATION.py index d38e3c9f..2e080e10 100644 --- a/asf_search/constants/POLARIZATION.py +++ b/asf_search/constants/POLARIZATION.py @@ -1,16 +1,16 @@ -HH = "HH" -VV = "VV" -VV_VH = "VV+VH" -HH_HV = "HH+HV" -DUAL_HH = "DUAL HH" -DUAL_VV = "DUAL VV" -DUAL_HV = "DUAL HV" -DUAL_VH = "DUAL VH" -HH_3SCAN = "HH 3SCAN" -HH_4SCAN = "HH 4SCAN" -HH_5SCAN = "HH 5SCAN" -QUAD = "quadrature" -HH_VV = "HH+VV" -HH_HV_VH_VV = "HH+HV+VH+VV" -FULL = "full" -UNKNOWN = "UNKNOWN" +HH = 'HH' +VV = 'VV' +VV_VH = 'VV+VH' +HH_HV = 'HH+HV' +DUAL_HH = 'DUAL HH' +DUAL_VV = 'DUAL VV' +DUAL_HV = 'DUAL HV' +DUAL_VH = 'DUAL VH' +HH_3SCAN = 'HH 3SCAN' +HH_4SCAN = 'HH 4SCAN' +HH_5SCAN = 'HH 5SCAN' +QUAD = 'quadrature' +HH_VV = 'HH+VV' +HH_HV_VH_VV = 'HH+HV+VH+VV' +FULL = 'full' +UNKNOWN = 'UNKNOWN' diff --git a/asf_search/constants/PRODUCT_TYPE.py b/asf_search/constants/PRODUCT_TYPE.py index 949eb113..2c97b4c9 100644 --- a/asf_search/constants/PRODUCT_TYPE.py +++ b/asf_search/constants/PRODUCT_TYPE.py @@ -1,29 +1,29 @@ # Sentinel-1 -GRD_HD = "GRD_HD" -GRD_MD = "GRD_MD" -GRD_MS = "GRD_MS" -GRD_HS = "GRD_HS" -GRD_FD = "GRD_FD" -SLC = "SLC" -OCN = "OCN" -RAW = "RAW" -METADATA_GRD_HD = "METADATA_GRD_HD" -METADATA_GRD_MD = "METADATA_GRD_MD" -METADATA_GRD_MS = "METADATA_GRD_MS" -METADATA_GRD_HS = "METADATA_GRD_HS" 
-METADATA_SLC = "METADATA_SLC" -METADATA_OCN = "METADATA_OCN" -METADATA_RAW = "METADATA_RAW" -BURST = "BURST" +GRD_HD = 'GRD_HD' +GRD_MD = 'GRD_MD' +GRD_MS = 'GRD_MS' +GRD_HS = 'GRD_HS' +GRD_FD = 'GRD_FD' +SLC = 'SLC' +OCN = 'OCN' +RAW = 'RAW' +METADATA_GRD_HD = 'METADATA_GRD_HD' +METADATA_GRD_MD = 'METADATA_GRD_MD' +METADATA_GRD_MS = 'METADATA_GRD_MS' +METADATA_GRD_HS = 'METADATA_GRD_HS' +METADATA_SLC = 'METADATA_SLC' +METADATA_OCN = 'METADATA_OCN' +METADATA_RAW = 'METADATA_RAW' +BURST = 'BURST' # ALOS PALSAR -L1_0 = "L1.0" -L1_1 = "L1.1" -L1_5 = "L1.5" -L2_2 = "L2.2" -RTC_LOW_RES = "RTC_LOW_RES" -RTC_HIGH_RES = "RTC_HI_RES" -KMZ = "KMZ" +L1_0 = 'L1.0' +L1_1 = 'L1.1' +L1_5 = 'L1.5' +L2_2 = 'L2.2' +RTC_LOW_RES = 'RTC_LOW_RES' +RTC_HIGH_RES = 'RTC_HI_RES' +KMZ = 'KMZ' # ALOS AVNIR # No PROCESSING_TYPE attribute in CMR @@ -32,45 +32,45 @@ # SLC and SLC metadata are both 'SLC', provided by Sentinel-1 constants # Sentinel-1 InSAR -GUNW_STD = "GUNW_STD" -GUNW_AMP = "GUNW_AMP" -GUNW_CON = "GUNW_CON" -GUN_COH = "GUNW_COH" -GUNW_UNW = "GUNW_UNW" +GUNW_STD = 'GUNW_STD' +GUNW_AMP = 'GUNW_AMP' +GUNW_CON = 'GUNW_CON' +GUN_COH = 'GUNW_COH' +GUNW_UNW = 'GUNW_UNW' # SMAP -L1A_RADAR_RO_HDF5 = "L1A_Radar_RO_HDF5" -L1A_RADAR_HDF5 = "L1A_Radar_HDF5" -L1B_S0_LOW_RES_HDF5 = "L1B_S0_LoRes_HDF5" -L1C_S0_HIGH_RES_HDF5 = "L1C_S0_HiRes_HDF5" -L1A_RADAR_RO_QA = "L1A_Radar_RO_QA" -L1A_RADAR_QA = "L1A_Radar_QA" -L1B_S0_LOW_RES_QA = "L1B_S0_LoRes_QA" -L1C_S0_HIGH_RES_QA = "L1C_S0_HiRes_QA" -L1A_RADAR_RO_ISO_XML = "L1A_Radar_RO_ISO_XML" -L1B_S0_LOW_RES_ISO_XML = "L1B_S0_LoRes_ISO_XML" -L1C_S0_HIGH_RES_ISO_XML = "L1C_S0_HiRes_ISO_XML" +L1A_RADAR_RO_HDF5 = 'L1A_Radar_RO_HDF5' +L1A_RADAR_HDF5 = 'L1A_Radar_HDF5' +L1B_S0_LOW_RES_HDF5 = 'L1B_S0_LoRes_HDF5' +L1C_S0_HIGH_RES_HDF5 = 'L1C_S0_HiRes_HDF5' +L1A_RADAR_RO_QA = 'L1A_Radar_RO_QA' +L1A_RADAR_QA = 'L1A_Radar_QA' +L1B_S0_LOW_RES_QA = 'L1B_S0_LoRes_QA' +L1C_S0_HIGH_RES_QA = 'L1C_S0_HiRes_QA' +L1A_RADAR_RO_ISO_XML = 'L1A_Radar_RO_ISO_XML' +L1B_S0_LOW_RES_ISO_XML = 'L1B_S0_LoRes_ISO_XML' +L1C_S0_HIGH_RES_ISO_XML = 'L1C_S0_HiRes_ISO_XML' # UAVSAR -AMPLITUDE = "AMPLITUDE" -STOKES = "STOKES" -AMPLITUDE_GRD = "AMPLITUDE_GRD" -PROJECTED = "PROJECTED" -PROJECTED_ML5X5 = "PROJECTED_ML5X5" -PROJECTED_ML3X3 = "PROJECTED_ML3X3" -INTERFEROMETRY_GRD = "INTERFEROMETRY_GRD" -INTERFEROMETRY = "INTERFEROMETRY" -COMPLEX = "COMPLEX" +AMPLITUDE = 'AMPLITUDE' +STOKES = 'STOKES' +AMPLITUDE_GRD = 'AMPLITUDE_GRD' +PROJECTED = 'PROJECTED' +PROJECTED_ML5X5 = 'PROJECTED_ML5X5' +PROJECTED_ML3X3 = 'PROJECTED_ML3X3' +INTERFEROMETRY_GRD = 'INTERFEROMETRY_GRD' +INTERFEROMETRY = 'INTERFEROMETRY' +COMPLEX = 'COMPLEX' # KMZ provided by ALOS PALSAR -INC = "INC" -SLOPE = "SLOPE" -DEM_TIFF = "DEM_TIFF" -PAULI = "PAULI" -METADATA = "METADATA" +INC = 'INC' +SLOPE = 'SLOPE' +DEM_TIFF = 'DEM_TIFF' +PAULI = 'PAULI' +METADATA = 'METADATA' # RADARSAT -L0 = "L0" -L1 = "L1" +L0 = 'L0' +L1 = 'L1' # ERS # L0 provided by RADARSAT @@ -81,22 +81,22 @@ # L1 provided by RADARSAT # AIRSAR -CTIF = "CTIF" -PTIF = "PTIF" -LTIF = "LTIF" -JPG = "JPG" -LSTOKES = "LSTOKES" -PSTOKES = "PSTOKES" -CSTOKES = "CSTOKES" -DEM = "DEM" -THREEFP = "3FP" +CTIF = 'CTIF' +PTIF = 'PTIF' +LTIF = 'LTIF' +JPG = 'JPG' +LSTOKES = 'LSTOKES' +PSTOKES = 'PSTOKES' +CSTOKES = 'CSTOKES' +DEM = 'DEM' +THREEFP = '3FP' # SEASAT -GEOTIFF = "GEOTIFF" +GEOTIFF = 'GEOTIFF' # L1 provided by RADARSAT # OPERA-S1 -RTC = "RTC" -CSLC = "CSLC" -RTC_STATIC = "RTC-STATIC" -CSLC_STATIC = "CSLC-STATIC" +RTC = 'RTC' +CSLC = 'CSLC' +RTC_STATIC = 'RTC-STATIC' +CSLC_STATIC = 
'CSLC-STATIC' diff --git a/asf_search/constants/__init__.py b/asf_search/constants/__init__.py index a190d426..dca8dc43 100644 --- a/asf_search/constants/__init__.py +++ b/asf_search/constants/__init__.py @@ -8,4 +8,4 @@ from .POLARIZATION import * # noqa: F403 F401 from .PRODUCT_TYPE import * # noqa: F403 F401 from .INTERNAL import * # noqa: F403 F401 -from .DATASET import * # noqa: F403 F401 +from .DATASET import * # noqa: F403 F401 diff --git a/asf_search/download/download.py b/asf_search/download/download.py index 6300fd5b..2de71db2 100644 --- a/asf_search/download/download.py +++ b/asf_search/download/download.py @@ -21,9 +21,7 @@ def _download_url(arg): download_url(url=url, path=path, session=session) -def download_urls( - urls: Iterable[str], path: str, session: ASFSession = None, processes: int = 1 -): +def download_urls(urls: Iterable[str], path: str, session: ASFSession = None, processes: int = 1): """ Downloads all products from the specified URLs to the specified location. @@ -47,9 +45,7 @@ def download_urls( pool.join() -def download_url( - url: str, path: str, filename: str = None, session: ASFSession = None -) -> None: +def download_url(url: str, path: str, filename: str = None, session: ASFSession = None) -> None: """ Downloads a product from the specified URL to the specified location and (optional) filename. @@ -64,12 +60,10 @@ def download_url( filename = os.path.split(parse.urlparse(url).path)[1] if not os.path.isdir(path): - raise ASFDownloadError(f"Error downloading {url}: directory not found: {path}") + raise ASFDownloadError(f'Error downloading {url}: directory not found: {path}') if os.path.isfile(os.path.join(path, filename)): - warnings.warn( - f"File already exists, skipping download: {os.path.join(path, filename)}" - ) + warnings.warn(f'File already exists, skipping download: {os.path.join(path, filename)}') return if session is None: @@ -77,12 +71,12 @@ def download_url( response = _try_get_response(session=session, url=url) - with open(os.path.join(path, filename), "wb") as f: + with open(os.path.join(path, filename), 'wb') as f: for chunk in response.iter_content(chunk_size=8192): f.write(chunk) -def remotezip(url: str, session: ASFSession) -> "RemoteZip": # type: ignore # noqa: F821 +def remotezip(url: str, session: ASFSession) -> 'RemoteZip': # type: ignore # noqa: F821 """ :param url: the url to the zip product :param session: the authenticated ASFSession to read and download from the zip file @@ -96,18 +90,18 @@ def remotezip(url: str, session: ASFSession) -> "RemoteZip": # type: ignore # n 'Ex: `python3 -m pip install asf-search[extras]`' ) - session.hooks["response"].append(strip_auth_if_aws) + session.hooks['response'].append(strip_auth_if_aws) return RemoteZip(url, session=session) def strip_auth_if_aws(r, *args, **kwargs): if ( - 300 <= r.status_code <= 399 and - "amazonaws.com" in parse.urlparse(r.headers["location"]).netloc + 300 <= r.status_code <= 399 + and 'amazonaws.com' in parse.urlparse(r.headers['location']).netloc ): - location = r.headers["location"] + location = r.headers['location'] r.headers.clear() - r.headers["location"] = location + r.headers['location'] = location # if it's an unprocessed burst product it'll return a 202 and we'll have to query again @@ -123,15 +117,13 @@ def _is_burst_processing(response: Response): stop=stop_after_delay(90), ) def _try_get_response(session: ASFSession, url: str): - response = session.get(url, stream=True, hooks={"response": strip_auth_if_aws}) + response = session.get(url, stream=True, 
hooks={'response': strip_auth_if_aws})

     try:
         response.raise_for_status()
     except HTTPError as e:
         if 400 <= response.status_code <= 499:
-            raise ASFAuthenticationError(
-                f"HTTP {e.response.status_code}: {e.response.text}"
-            )
+            raise ASFAuthenticationError(f'HTTP {e.response.status_code}: {e.response.text}')

         raise e
diff --git a/asf_search/exceptions.py b/asf_search/exceptions.py
index 5edc3a56..440cc3f6 100644
--- a/asf_search/exceptions.py
+++ b/asf_search/exceptions.py
@@ -13,6 +13,7 @@ class ASFSearch4xxError(ASFSearchError):
 class ASFSearch5xxError(ASFSearchError):
     """Raise when CMR returns a 5xx error"""

+
 class ASFBaselineError(ASFSearchError):
     """Raise when baseline related errors occur"""
diff --git a/asf_search/export/csv.py b/asf_search/export/csv.py
index da8d340e..dbfb64a6 100644
--- a/asf_search/export/csv.py
+++ b/asf_search/export/csv.py
@@ -6,7 +6,6 @@
 import inspect

 extra_csv_fields = [
-<<<<<<< HEAD
     ("sceneDate", ["AdditionalAttributes", ("Name", "ACQUISITION_DATE"), "Values", 0]),
     ("nearStartLat", ["AdditionalAttributes", ("Name", "NEAR_START_LAT"), "Values", 0]),
     ("nearStartLon", ["AdditionalAttributes", ("Name", "NEAR_START_LON"), "Values", 0]),
@@ -34,23 +33,6 @@
         "offNadirAngle",
         ["AdditionalAttributes", ("Name", "OFF_NADIR_ANGLE"), "Values", 0],
     ),
-=======
-    ('sceneDate', ['AdditionalAttributes', ('Name', 'ACQUISITION_DATE'), 'Values', 0]),
-    ('nearStartLat', ['AdditionalAttributes', ('Name', 'NEAR_START_LAT'), 'Values', 0]),
-    ('nearStartLon', ['AdditionalAttributes', ('Name', 'NEAR_START_LON'), 'Values', 0]),
-    ('farStartLat', ['AdditionalAttributes', ('Name', 'FAR_START_LAT'), 'Values', 0]),
-    ('farStartLon', ['AdditionalAttributes', ('Name', 'FAR_START_LON'), 'Values', 0]),
-    ('nearEndLat', ['AdditionalAttributes', ('Name', 'NEAR_END_LAT'), 'Values', 0]),
-    ('nearEndLon', ['AdditionalAttributes', ('Name', 'NEAR_END_LON'), 'Values', 0]),
-    ('farEndLat', ['AdditionalAttributes', ('Name', 'FAR_END_LAT'), 'Values', 0]),
-    ('farEndLon', ['AdditionalAttributes', ('Name', 'FAR_END_LON'), 'Values', 0]),
-    ('faradayRotation', ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0]),
-    ('configurationName', ['AdditionalAttributes', ('Name', 'BEAM_MODE_DESC'), 'Values', 0]),
-    ('doppler', ['AdditionalAttributes', ('Name', 'DOPPLER'), 'Values', 0]),
-    ('sizeMB', ['DataGranule', 'ArchiveAndDistributionInformation', 0, 'Size']),
-    ('insarStackSize', ['AdditionalAttributes', ('Name', 'INSAR_STACK_SIZE'), 'Values', 0]),
-    ('offNadirAngle', ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0]),
->>>>>>> master
 ]

 fieldnames = (
diff --git a/asf_search/export/export_translators.py b/asf_search/export/export_translators.py
index 832bbd50..5a922c8b 100644
--- a/asf_search/export/export_translators.py
+++ b/asf_search/export/export_translators.py
@@ -19,19 +19,19 @@ def ASFSearchResults_to_properties_list(
     # Format dates to match format used by SearchAPI output formats
     for product in property_list:
         # S1 date properties are formatted differently from other platforms
-        is_S1 = product["platform"].upper() in [
-            "SENTINEL-1",
-            "SENTINEL-1B",
-            "SENTINEL-1A",
+        is_S1 = product['platform'].upper() in [
+            'SENTINEL-1',
+            'SENTINEL-1B',
+            'SENTINEL-1A',
         ]
         for key, data in product.items():
-            if ("date" in key.lower() or "time" in key.lower()) and data is not None:
+            if ('date' in key.lower() or 'time' in key.lower()) and data is not None:
                 if not is_S1:
                     # Remove trailing zeroes from milliseconds, add Z
-                    if len(data.split(".")) == 2:
-                        d = len(data.split(".")[0])
-
data = data[:d] + "Z" - time = datetime.strptime(data, "%Y-%m-%dT%H:%M:%SZ") - product[key] = time.strftime("%Y-%m-%dT%H:%M:%SZ") + if len(data.split('.')) == 2: + d = len(data.split('.')[0]) + data = data[:d] + 'Z' + time = datetime.strptime(data, '%Y-%m-%dT%H:%M:%SZ') + product[key] = time.strftime('%Y-%m-%dT%H:%M:%SZ') return property_list diff --git a/asf_search/export/geojson.py b/asf_search/export/geojson.py index 88434294..4b273eb8 100644 --- a/asf_search/export/geojson.py +++ b/asf_search/export/geojson.py @@ -6,17 +6,15 @@ def results_to_geojson(results): - ASF_LOGGER.info("started translating results to geojson format") + ASF_LOGGER.info('started translating results to geojson format') - if not inspect.isgeneratorfunction(results) and not isinstance( - results, GeneratorType - ): + if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): results = [results] streamer = GeoJSONStreamArray(results) for p in json.JSONEncoder(indent=2, sort_keys=True).iterencode( - {"type": "FeatureCollection", "features": streamer} + {'type': 'FeatureCollection', 'features': streamer} ): yield p @@ -38,15 +36,15 @@ def __len__(self): def streamDicts(self): completed = False for page_idx, page in enumerate(self.results): - ASF_LOGGER.info(f"Streaming {len(page)} products from page {page_idx}") + ASF_LOGGER.info(f'Streaming {len(page)} products from page {page_idx}') completed = page.searchComplete yield from [self.getItem(p) for p in page if p is not None] if not completed: - ASF_LOGGER.warn("Failed to download all results from CMR") + ASF_LOGGER.warn('Failed to download all results from CMR') - ASF_LOGGER.info("Finished streaming geojson results") + ASF_LOGGER.info('Finished streaming geojson results') def getItem(self, p): return p.geojson() diff --git a/asf_search/export/jsonlite.py b/asf_search/export/jsonlite.py index e8e5852d..80aaa75b 100644 --- a/asf_search/export/jsonlite.py +++ b/asf_search/export/jsonlite.py @@ -23,23 +23,13 @@ ("missionName", ["AdditionalAttributes", ("Name", "MISSION_NAME"), "Values", 0]), ] -<<<<<<< HEAD - -def results_to_jsonlite(results): - ASF_LOGGER.info("started translating results to jsonlite format") - - if not inspect.isgeneratorfunction(results) and not isinstance( - results, GeneratorType - ): -======= def results_to_jsonlite(results): ASF_LOGGER.info('started translating results to jsonlite format') if len(results) == 0: yield from json.JSONEncoder(indent=2, sort_keys=True).iterencode({'results': []}) return - + if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): ->>>>>>> master results = [results] streamer = JSONLiteStreamArray(results) @@ -173,11 +163,7 @@ def getItem(self, p): pass try: -<<<<<<< HEAD - p["frameNumber"] = int(p["frameNumber"]) -======= p['frameNumber'] = int(p.get('frameNumber')) ->>>>>>> master except TypeError: pass @@ -228,16 +214,9 @@ def getItem(self, p): if result[key] in ["NA", "NULL"]: result[key] = None -<<<<<<< HEAD if "temporalBaseline" in p.keys() or "perpendicularBaseline" in p.keys(): result["temporalBaseline"] = p["temporalBaseline"] result["perpendicularBaseline"] = p["perpendicularBaseline"] -======= - if 'temporalBaseline' in p.keys(): - result['temporalBaseline'] = p['temporalBaseline'] - if 'perpendicularBaseline' in p.keys(): - result['perpendicularBaseline'] = p['perpendicularBaseline'] ->>>>>>> master if p.get("processingLevel") == "BURST": # is a burst product result["burst"] = p["burst"] diff --git a/asf_search/export/jsonlite2.py 
b/asf_search/export/jsonlite2.py index f42ffdc5..ef2a8b5f 100644 --- a/asf_search/export/jsonlite2.py +++ b/asf_search/export/jsonlite2.py @@ -5,15 +5,6 @@ from asf_search import ASF_LOGGER from .jsonlite import JSONLiteStreamArray -<<<<<<< HEAD - -def results_to_jsonlite2(results): - ASF_LOGGER.info("started translating results to jsonlite2 format") - - if not inspect.isgeneratorfunction(results) and not isinstance( - results, GeneratorType - ): -======= def results_to_jsonlite2(results): ASF_LOGGER.info('started translating results to jsonlite2 format') @@ -22,7 +13,6 @@ def results_to_jsonlite2(results): return if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): ->>>>>>> master results = [results] streamer = JSONLite2StreamArray(results) @@ -75,15 +65,6 @@ def getItem(self, p): "pge": p["pgeVersion"], } -<<<<<<< HEAD - if "temporalBaseline" in p.keys() or "perpendicularBaseline" in p.keys(): - result["tb"] = p["temporalBaseline"] - result["pb"] = p["perpendicularBaseline"] - - if p.get("burst") is not None: # is a burst product - result["s1b"] = p["burst"] - -======= if 'temporalBaseline' in p.keys(): result['tb'] = p['temporalBaseline'] if 'perpendicularBaseline' in p.keys(): @@ -95,7 +76,6 @@ def getItem(self, p): if p.get('opera') is not None: result['s1o'] = p['opera'] ->>>>>>> master return result def getOutputType(self) -> str: diff --git a/asf_search/export/kml.py b/asf_search/export/kml.py index 70e6b63b..295b7e7f 100644 --- a/asf_search/export/kml.py +++ b/asf_search/export/kml.py @@ -180,21 +180,12 @@ def getItem(self, p): # Helper method for getting additional fields in
<ul> tag
    def metadata_fields(self, item: Dict):
        required = {
-<<<<<<< HEAD
-            "Processing type: ": item["processingTypeDisplay"],
-            "Frame: ": item["frameNumber"],
-            "Path: ": item["pathNumber"],
-            "Orbit: ": item["orbit"],
-            "Start time: ": item["startTime"],
-            "End time: ": item["stopTime"],
-=======
             'Processing type: ': item.get('processingTypeDisplay'),
             'Frame: ': item.get('frameNumber'),
             'Path: ': item.get('pathNumber'),
             'Orbit: ': item.get('orbit'),
             'Start time: ': item.get('startTime'),
             'End time: ': item.get('stopTime'),
->>>>>>> master
         }

         optional = {}
diff --git a/asf_search/export/metalink.py b/asf_search/export/metalink.py
index ca513de9..aa232d51 100644
--- a/asf_search/export/metalink.py
+++ b/asf_search/export/metalink.py
@@ -6,7 +6,7 @@

 def results_to_metalink(results):
-    ASF_LOGGER.info("Started translating results to metalink format")
+    ASF_LOGGER.info('Started translating results to metalink format')

     if inspect.isgeneratorfunction(results) or isinstance(results, GeneratorType):
         return MetalinkStreamArray(results)
@@ -21,7 +21,7 @@
     def __init__(self, results):
         self.header = (
             '<?xml version="1.0"?>'
             '<metalink xmlns="http://www.metalinker.org/" version="3.0">\n'
-            '<publisher><name>Alaska Satellite Facility</name><url>http://www.asf.alaska.edu/</url></publisher>\n'  # noqa F401
+            '<publisher><name>Alaska Satellite Facility</name><url>http://www.asf.alaska.edu/</url></publisher>\n'  # noqa F401
             '<files>'
         )
@@ -41,54 +41,52 @@
     def streamPages(self):
         completed = False

         for page_idx, page in enumerate(self.pages):
-            ASF_LOGGER.info(f"Streaming {len(page)} products from page {page_idx}")
+            ASF_LOGGER.info(f'Streaming {len(page)} products from page {page_idx}')
             completed = page.searchComplete

-            properties_list = ASFSearchResults_to_properties_list(
-                page, self.get_additional_fields
-            )
+            properties_list = ASFSearchResults_to_properties_list(page, self.get_additional_fields)
             yield from [self.getItem(p) for p in properties_list]

         if not completed:
-            ASF_LOGGER.warn("Failed to download all results from CMR")
+            ASF_LOGGER.warn('Failed to download all results from CMR')

         yield self.footer

-        ASF_LOGGER.info(f"Finished streaming {self.getOutputType()} results")
+        ASF_LOGGER.info(f'Finished streaming {self.getOutputType()} results')

     def getOutputType(self) -> str:
-        return "metalink"
+        return 'metalink'

     def getItem(self, p):
-        file = ETree.Element("file", attrib={"name": p["fileName"]})
-        resources = ETree.Element("resources")
+        file = ETree.Element('file', attrib={'name': p['fileName']})
+        resources = ETree.Element('resources')

-        url = ETree.Element("url", attrib={"type": "http"})
-        url.text = p["url"]
+        url = ETree.Element('url', attrib={'type': 'http'})
+        url.text = p['url']
         resources.append(url)
         file.append(resources)

-        if p.get("md5sum") and p.get("md5sum") != "NA":
-            verification = ETree.Element("verification")
-            h = ETree.Element("hash", {"type": "md5"})
-            h.text = p["md5sum"]
+        if p.get('md5sum') and p.get('md5sum') != 'NA':
+            verification = ETree.Element('verification')
+            h = ETree.Element('hash', {'type': 'md5'})
+            h.text = p['md5sum']
             verification.append(h)
             file.append(verification)

-        if p["bytes"] and p["bytes"] != "NA":
-            size = ETree.Element("size")
-            size.text = str(p["bytes"])
+        if p['bytes'] and p['bytes'] != 'NA':
+            size = ETree.Element('size')
+            size.text = str(p['bytes'])
             file.append(size)

-        return "\n" + (8 * " ") + ETree.tostring(file, encoding="unicode")
+        return '\n' + (8 * ' ') + ETree.tostring(file, encoding='unicode')
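# --- Editor's note: an illustrative sketch, not part of this patch. ---
# MetalinkStreamArray emits the document in fragments (header, one <file>
# element per product, footer), which is why the exporter also accepts
# generator input. A minimal way to write a .metalink file, using only
# calls that appear elsewhere in this patch series:
#
#     import asf_search as asf
#     results = asf.search(platform=asf.PLATFORM.SENTINEL1, maxResults=5)
#     with open('results.metalink', 'w') as f:
#         for fragment in results.metalink():
#             f.write(fragment)

     def indent(self, elem, level=0):
         # Only Python 3.9+ has a built-in indent function for element tree.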
# https://stackoverflow.com/a/33956544 - i = "\n" + level * " " + i = '\n' + level * ' ' if len(elem): if not elem.text or not elem.text.strip(): - elem.text = i + " " + elem.text = i + ' ' if not elem.tail or not elem.tail.strip(): elem.tail = i for elem in elem: diff --git a/asf_search/health/health.py b/asf_search/health/health.py index 5adfaaa9..0db8f483 100644 --- a/asf_search/health/health.py +++ b/asf_search/health/health.py @@ -22,6 +22,4 @@ def health(host: str = None) -> Dict: if host is None: host = asf_search.INTERNAL.CMR_HOST - return json.loads( - requests.get(f"https://{host}{asf_search.INTERNAL.CMR_HEALTH_PATH}").text - ) + return json.loads(requests.get(f'https://{host}{asf_search.INTERNAL.CMR_HEALTH_PATH}').text) diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py index 628dc50f..1bcee03d 100644 --- a/asf_search/search/baseline_search.py +++ b/asf_search/search/baseline_search.py @@ -59,10 +59,10 @@ def stack_from_product( stack, warnings = get_baseline_from_stack(reference=reference, stack=stack) stack.searchComplete = is_complete # preserve final outcome of earlier search() - stack.sort(key=lambda product: product.properties["temporalBaseline"]) + stack.sort(key=lambda product: product.properties['temporalBaseline']) for warning in warnings: - ASF_LOGGER.warning(f"{warning}") + ASF_LOGGER.warning(f'{warning}') return stack @@ -99,7 +99,7 @@ def stack_from_id( reference_results.raise_if_incomplete() if len(reference_results) <= 0: - raise ASFSearchError(f"Reference product not found: {reference_id}") + raise ASFSearchError(f'Reference product not found: {reference_id}') reference = reference_results[0] if useSubclass is not None: @@ -108,9 +108,7 @@ def stack_from_id( return reference.stack(opts=opts, useSubclass=useSubclass) -def _cast_results_to_subclass( - stack: ASFSearchResults, ASFProductSubclass: Type[ASFProduct] -): +def _cast_results_to_subclass(stack: ASFSearchResults, ASFProductSubclass: Type[ASFProduct]): """ Converts results from default ASFProduct subclasses to custom ones """ @@ -142,11 +140,9 @@ class MyCustomClass(ASFProduct): try: if isinstance(subclass, type(ASFProduct)): return subclass( - args={"umm": product.umm, "meta": product.meta}, session=product.session + args={'umm': product.umm, 'meta': product.meta}, session=product.session ) except Exception as e: - raise ValueError( - f"Unable to use provided subclass {type(subclass)}, \nError Message: {e}" - ) + raise ValueError(f'Unable to use provided subclass {type(subclass)}, \nError Message: {e}') - raise ValueError(f"Expected ASFProduct subclass constructor, got {type(subclass)}") + raise ValueError(f'Expected ASFProduct subclass constructor, got {type(subclass)}') diff --git a/asf_search/search/campaigns.py b/asf_search/search/campaigns.py index d1541396..7e256257 100644 --- a/asf_search/search/campaigns.py +++ b/asf_search/search/campaigns.py @@ -12,19 +12,19 @@ def campaigns(platform: str) -> List[str]: :return: A list of campaign names for the given platform """ - data = {"include_facets": "true", "provider": "ASF"} + data = {'include_facets': 'true', 'provider': 'ASF'} if platform is not None: - if platform == "UAVSAR": - data["platform[]"] = "G-III" - data["instrument[]"] = "UAVSAR" - elif platform == "AIRSAR": - data["platform[]"] = "DC-8" - data["instrument[]"] = "AIRSAR" - elif platform == "SENTINEL-1 INTERFEROGRAM (BETA)": - data["platform[]"] = "SENTINEL-1A" + if platform == 'UAVSAR': + data['platform[]'] = 'G-III' + data['instrument[]'] = 'UAVSAR' + 
elif platform == 'AIRSAR': + data['platform[]'] = 'DC-8' + data['instrument[]'] = 'AIRSAR' + elif platform == 'SENTINEL-1 INTERFEROGRAM (BETA)': + data['platform[]'] = 'SENTINEL-1A' else: - data["platform[]"] = platform + data['platform[]'] = platform missions = get_campaigns(data) mission_names = _get_project_names(missions) @@ -44,7 +44,7 @@ def _get_project_names(data: Union[Dict, List]) -> List[str]: output = [] if isinstance(data, Dict): for key, value in data.items(): - if key == "Projects": + if key == 'Projects': return [list(item.values())[0] for item in value] output.extend(_get_project_names(value)) elif isinstance(data, List): diff --git a/asf_search/search/error_reporting.py b/asf_search/search/error_reporting.py index 0ab7e7c2..79a0106c 100644 --- a/asf_search/search/error_reporting.py +++ b/asf_search/search/error_reporting.py @@ -18,18 +18,16 @@ def report_search_error(search_options: ASFSearchOptions, message: str): ) return - user_agent = search_options.session.headers.get("User-Agent") - search_options_list = "\n".join( - [f"\t{option}: {key}" for option, key in dict(search_options).items()] + user_agent = search_options.session.headers.get('User-Agent') + search_options_list = '\n'.join( + [f'\t{option}: {key}' for option, key in dict(search_options).items()] ) - message = f"Error Message: {str(message)}\nUser Agent: {user_agent} \ - \nSearch Options: {{\n{search_options_list}\n}}" + message = f'Error Message: {str(message)}\nUser Agent: {user_agent} \ + \nSearch Options: {{\n{search_options_list}\n}}' response = requests.post( - f"https://{INTERNAL.ERROR_REPORTING_ENDPOINT}", - data={ - "Message": f"This error message and info was automatically generated:\n\n{message}" - }, + f'https://{INTERNAL.ERROR_REPORTING_ENDPOINT}', + data={'Message': f'This error message and info was automatically generated:\n\n{message}'}, ) try: @@ -38,7 +36,8 @@ def report_search_error(search_options: ASFSearchOptions, message: str): logging.error( 'asf-search failed to automatically report an error,' 'if you have any questions email uso@asf.alaska.edu' - f"\nError Text: HTTP {response.status_code}: {response.json()['errors']}") + f"\nError Text: HTTP {response.status_code}: {response.json()['errors']}" + ) return if response.status_code == 200: logging.error( @@ -46,6 +45,6 @@ def report_search_error(search_options: ASFSearchOptions, message: str): 'The asf-search module ecountered an error with CMR,' 'and the following message was automatically reported to ASF:' '\n\n"\nmessage\n"' - "If you have any questions email uso@asf.alaska.edu" + 'If you have any questions email uso@asf.alaska.edu' ) ) diff --git a/asf_search/search/geo_search.py b/asf_search/search/geo_search.py index 6172e10c..b50dcd0b 100644 --- a/asf_search/search/geo_search.py +++ b/asf_search/search/geo_search.py @@ -24,9 +24,7 @@ def geo_search( minFaradayRotation: float = None, flightDirection: str = None, flightLine: str = None, - frame: Union[ - int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] - ] = None, + frame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, granule_list: Union[str, Sequence[str]] = None, groupID: Union[str, Sequence[str]] = None, insarStackId: str = None, @@ -159,9 +157,7 @@ def geo_search( """ kwargs = locals() - data = dict( - (k, v) for k, v in kwargs.items() if k not in ["host", "opts"] and v is not None - ) + data = dict((k, v) for k, v in kwargs.items() if k not in ['host', 'opts'] and v is not None) opts = ASFSearchOptions() if 
opts is None else copy(opts) opts.merge_args(**data) diff --git a/asf_search/search/granule_search.py b/asf_search/search/granule_search.py index 5122d0c7..d409a1ca 100644 --- a/asf_search/search/granule_search.py +++ b/asf_search/search/granule_search.py @@ -6,9 +6,7 @@ from asf_search.ASFSearchResults import ASFSearchResults -def granule_search( - granule_list: Sequence[str], opts: ASFSearchOptions = None -) -> ASFSearchResults: +def granule_search(granule_list: Sequence[str], opts: ASFSearchOptions = None) -> ASFSearchResults: """ Performs a granule name search using the ASF SearchAPI diff --git a/asf_search/search/product_search.py b/asf_search/search/product_search.py index c60ebd69..10e9bd22 100644 --- a/asf_search/search/product_search.py +++ b/asf_search/search/product_search.py @@ -6,9 +6,7 @@ from asf_search.ASFSearchResults import ASFSearchResults -def product_search( - product_list: Sequence[str], opts: ASFSearchOptions = None -) -> ASFSearchResults: +def product_search(product_list: Sequence[str], opts: ASFSearchOptions = None) -> ASFSearchResults: """ Performs a product ID search using the ASF SearchAPI diff --git a/asf_search/search/search.py b/asf_search/search/search.py index b466f326..cc60fcb9 100644 --- a/asf_search/search/search.py +++ b/asf_search/search/search.py @@ -25,9 +25,7 @@ def search( minFaradayRotation: float = None, flightDirection: str = None, flightLine: str = None, - frame: Union[ - int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] - ] = None, + frame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, granule_list: Union[str, Sequence[str]] = None, groupID: Union[str, Sequence[str]] = None, insarStackId: str = None, @@ -164,9 +162,7 @@ def search( `asf_search.ASFSearchResults` (list of search results of subclass ASFProduct) """ kwargs = locals() - data = dict( - (k, v) for k, v in kwargs.items() if k not in ["host", "opts"] and v is not None - ) + data = dict((k, v) for k, v in kwargs.items() if k not in ['host', 'opts'] and v is not None) opts = ASFSearchOptions() if opts is None else copy(opts) opts.merge_args(**data) @@ -176,7 +172,7 @@ def search( # The last page will be marked as complete if results sucessful perf = time.time() for page in search_generator(opts=opts): - ASF_LOGGER.warning(f"Page Time Elapsed {time.time() - perf}") + ASF_LOGGER.warning(f'Page Time Elapsed {time.time() - perf}') results.extend(page) results.searchComplete = page.searchComplete results.searchOptions = page.searchOptions @@ -187,8 +183,6 @@ def search( try: results.sort(key=lambda p: p.get_sort_keys(), reverse=True) except TypeError as exc: - ASF_LOGGER.warning( - f"Failed to sort final results, leaving results unsorted. Reason: {exc}" - ) + ASF_LOGGER.warning(f'Failed to sort final results, leaving results unsorted. 
Reason: {exc}') return results diff --git a/asf_search/search/search_count.py b/asf_search/search/search_count.py index c802bf79..22cc744d 100644 --- a/asf_search/search/search_count.py +++ b/asf_search/search/search_count.py @@ -25,9 +25,7 @@ def search_count( minFaradayRotation: float = None, flightDirection: str = None, flightLine: str = None, - frame: Union[ - int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] - ] = None, + frame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, granule_list: Union[str, Sequence[str]] = None, groupID: Union[str, Sequence[str]] = None, insarStackId: str = None, @@ -61,8 +59,8 @@ def search_count( ) -> int: # Create a kwargs dict, that's all of the 'not None' items, and merge it with opts: kwargs = locals() - opts = ASFSearchOptions() if kwargs["opts"] is None else copy(opts) - del kwargs["opts"] + opts = ASFSearchOptions() if kwargs['opts'] is None else copy(opts) + del kwargs['opts'] kwargs = dict((k, v) for k, v in kwargs.items() if v is not None) kw_opts = ASFSearchOptions(**kwargs) @@ -72,18 +70,14 @@ def search_count( preprocess_opts(opts) - url = "/".join( - s.strip("/") for s in [f"https://{opts.host}", f"{INTERNAL.CMR_GRANULE_PATH}"] - ) + url = '/'.join(s.strip('/') for s in [f'https://{opts.host}', f'{INTERNAL.CMR_GRANULE_PATH}']) count = 0 for query in build_subqueries(opts): translated_opts = translate_opts(query) - idx = translated_opts.index(("page_size", INTERNAL.CMR_PAGE_SIZE)) - translated_opts[idx] = ("page_size", 0) + idx = translated_opts.index(('page_size', INTERNAL.CMR_PAGE_SIZE)) + translated_opts[idx] = ('page_size', 0) - response = get_page( - session=opts.session, url=url, translated_opts=translated_opts - ) - count += response.json()["hits"] + response = get_page(session=opts.session, url=url, translated_opts=translated_opts) + count += response.json()['hits'] return count diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index bdea2e9b..550891f7 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -51,9 +51,7 @@ def search_generator( minFaradayRotation: float = None, flightDirection: str = None, flightLine: str = None, - frame: Union[ - int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]] - ] = None, + frame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None, granule_list: Union[str, Sequence[str]] = None, groupID: Union[str, Sequence[str]] = None, insarStackId: str = None, @@ -191,8 +189,8 @@ def search_generator( # Create a kwargs dict, that's all of the 'not None' items, and merge it with opts: kwargs = locals() - opts = ASFSearchOptions() if kwargs["opts"] is None else copy(opts) - del kwargs["opts"] + opts = ASFSearchOptions() if kwargs['opts'] is None else copy(opts) + del kwargs['opts'] kwargs = dict((k, v) for k, v in kwargs.items() if v is not None) kw_opts = ASFSearchOptions(**kwargs) @@ -200,47 +198,43 @@ def search_generator( # Anything passed in as kwargs has priority over anything in opts: opts.merge_args(**dict(kw_opts)) - maxResults = opts.pop("maxResults", None) + maxResults = opts.pop('maxResults', None) if maxResults is not None and ( - getattr(opts, "granule_list", False) or getattr(opts, "product_list", False) + getattr(opts, 'granule_list', False) or getattr(opts, 'product_list', False) ): - raise ValueError("Cannot use maxResults along with product_list/granule_list.") + raise ValueError('Cannot use 
maxResults along with product_list/granule_list.') - ASF_LOGGER.debug(f"SEARCH: preprocessing opts: {opts}") + ASF_LOGGER.debug(f'SEARCH: preprocessing opts: {opts}') preprocess_opts(opts) - ASF_LOGGER.debug(f"SEARCH: preprocessed opts: {opts}") + ASF_LOGGER.debug(f'SEARCH: preprocessed opts: {opts}') - ASF_LOGGER.info(f"SEARCH: Using search opts {opts}") + ASF_LOGGER.info(f'SEARCH: Using search opts {opts}') - url = "/".join( - s.strip("/") for s in [f"https://{opts.host}", f"{INTERNAL.CMR_GRANULE_PATH}"] - ) + url = '/'.join(s.strip('/') for s in [f'https://{opts.host}', f'{INTERNAL.CMR_GRANULE_PATH}']) total = 0 queries = build_subqueries(opts) ASF_LOGGER.info(f'SEARCH: Using cmr endpoint: "{url}"') - ASF_LOGGER.debug(f"SEARCH: Built {len(queries)} subqueries") + ASF_LOGGER.debug(f'SEARCH: Built {len(queries)} subqueries') for subquery_idx, query in enumerate(queries): - ASF_LOGGER.info( - f"SUBQUERY {subquery_idx + 1}: Beginning subquery with opts: {query}" - ) + ASF_LOGGER.info(f'SUBQUERY {subquery_idx + 1}: Beginning subquery with opts: {query}') - ASF_LOGGER.debug(f"TRANSLATION: Translating subquery:\n{query}") + ASF_LOGGER.debug(f'TRANSLATION: Translating subquery:\n{query}') translated_opts = translate_opts(query) - ASF_LOGGER.debug( - f"TRANSLATION: Subquery translated to cmr keywords:\n{translated_opts}" - ) - cmr_search_after_header = "" + ASF_LOGGER.debug(f'TRANSLATION: Subquery translated to cmr keywords:\n{translated_opts}') + cmr_search_after_header = '' subquery_count = 0 page_number = 1 while cmr_search_after_header is not None: try: ASF_LOGGER.debug(f'SUBQUERY {subquery_idx + 1}: Fetching page {page_number}') - items, subquery_max_results, cmr_search_after_header = query_cmr(opts.session, url, translated_opts, subquery_count) + items, subquery_max_results, cmr_search_after_header = query_cmr( + opts.session, url, translated_opts, subquery_count + ) except (ASFSearchError, CMRIncompleteError) as exc: message = str(exc) ASF_LOGGER.error(message) @@ -249,41 +243,41 @@ def search_generator( # If it's a CMRIncompleteError, we can just stop here and return what we have # It's up to the user to call .raise_if_incomplete() if they're using the # generator directly. - if type(exc) == CMRIncompleteError: + if isinstance(exc, CMRIncompleteError): return else: raise - ASF_LOGGER.debug(f'SUBQUERY {subquery_idx + 1}: Page {page_number} fetched, returned {len(items)} items.') + ASF_LOGGER.debug( + f'SUBQUERY {subquery_idx + 1}: Page {page_number} fetched, returned {len(items)} items.' 
+ ) opts.session.headers.update({'CMR-Search-After': cmr_search_after_header}) perf = time.time() - last_page = process_page(items, maxResults, subquery_max_results, total, subquery_count, opts) - ASF_LOGGER.warning(f"Page Processing Time {time.time() - perf}") + last_page = process_page( + items, maxResults, subquery_max_results, total, subquery_count, opts + ) + ASF_LOGGER.warning(f'Page Processing Time {time.time() - perf}') subquery_count += len(last_page) total += len(last_page) - last_page.searchComplete = ( - subquery_count == subquery_max_results or total == maxResults - ) + last_page.searchComplete = subquery_count == subquery_max_results or total == maxResults yield last_page if last_page.searchComplete: if total == maxResults: # the user has as many results as they wanted - ASF_LOGGER.info( - f"SEARCH COMPLETE: MaxResults ({maxResults}) reached" - ) - opts.session.headers.pop("CMR-Search-After", None) + ASF_LOGGER.info(f'SEARCH COMPLETE: MaxResults ({maxResults}) reached') + opts.session.headers.pop('CMR-Search-After', None) return else: # or we've gotten all possible results for this subquery ASF_LOGGER.info( - f"SUBQUERY {subquery_idx + 1} COMPLETE: results exhausted for subquery" + f'SUBQUERY {subquery_idx + 1} COMPLETE: results exhausted for subquery' ) cmr_search_after_header = None page_number += 1 - opts.session.headers.pop("CMR-Search-After", None) + opts.session.headers.pop('CMR-Search-After', None) - ASF_LOGGER.info(f"SEARCH COMPLETE: results exhausted for search opts {opts}") + ASF_LOGGER.info(f'SEARCH COMPLETE: results exhausted for search opts {opts}') @retry( @@ -298,14 +292,12 @@ def query_cmr( translated_opts: Dict, sub_query_count: int, ): - response = get_page( - session=session, url=url, translated_opts=translated_opts - ) + response = get_page(session=session, url=url, translated_opts=translated_opts) perf = time.time() items = [as_ASFProduct(f, session=session) for f in response.json()['items']] - ASF_LOGGER.warning(f"Product Subclassing Time {time.time() - perf}") - hits: int = response.json()['hits'] # total count of products given search opts + ASF_LOGGER.warning(f'Product Subclassing Time {time.time() - perf}') + hits: int = response.json()['hits'] # total count of products given search opts # 9-10 per process # 3.9-5 per process # sometimes CMR returns results with the wrong page size @@ -316,7 +308,7 @@ def query_cmr( f'got {len(items)}' ) - return items, hits, response.headers.get("CMR-Search-After", None) + return items, hits, response.headers.get('CMR-Search-After', None) def process_page( @@ -332,19 +324,21 @@ def process_page( items[: min(subquery_max_results - subquery_count, len(items))], opts=opts ) else: - last_page = ASFSearchResults( - items[: min(max_results - total, len(items))], opts=opts - ) + last_page = ASFSearchResults(items[: min(max_results - total, len(items))], opts=opts) return last_page -@retry(reraise=True, - retry=retry_if_exception_type(ASFSearch5xxError), - wait=wait_exponential(multiplier=1, min=3, max=10), # Wait 2^x * 1 starting with 3 seconds, max 10 seconds between retries - stop=stop_after_attempt(3), - ) +@retry( + reraise=True, + retry=retry_if_exception_type(ASFSearch5xxError), + wait=wait_exponential( + multiplier=1, min=3, max=10 + ), # Wait 2^x * 1 starting with 3 seconds, max 10 seconds between retries + stop=stop_after_attempt(3), +) def get_page(session: ASFSession, url: str, translated_opts: List) -> Response: from asf_search.constants.INTERNAL import CMR_TIMEOUT + perf = time.time() try: response = 
session.post(url=url, data=translated_opts, timeout=CMR_TIMEOUT) @@ -356,9 +350,11 @@ def get_page(session: ASFSession, url: str, translated_opts: List) -> Response: if 500 <= response.status_code <= 599: raise ASFSearch5xxError(error_message) from exc except ReadTimeout as exc: - raise ASFSearchError(f'Connection Error (Timeout): CMR took too long to respond. Set asf constant "asf_search.constants.INTERNAL.CMR_TIMEOUT" to increase. ({url=}, timeout={CMR_TIMEOUT})') from exc + raise ASFSearchError( + f'Connection Error (Timeout): CMR took too long to respond. Set asf constant "asf_search.constants.INTERNAL.CMR_TIMEOUT" to increase. ({url=}, timeout={CMR_TIMEOUT})' + ) from exc - ASF_LOGGER.warning(f"Query Time Elapsed {time.time() - perf}") + ASF_LOGGER.warning(f'Query Time Elapsed {time.time() - perf}') return response @@ -386,25 +382,21 @@ def wrap_wkt(opts: ASFSearchOptions): def set_default_dates(opts: ASFSearchOptions): if opts.start is not None and isinstance(opts.start, str): - opts.start = dateparser.parse( - opts.start, settings={"RETURN_AS_TIMEZONE_AWARE": True} - ) + opts.start = dateparser.parse(opts.start, settings={'RETURN_AS_TIMEZONE_AWARE': True}) if opts.end is not None and isinstance(opts.end, str): - opts.end = dateparser.parse( - opts.end, settings={"RETURN_AS_TIMEZONE_AWARE": True} - ) + opts.end = dateparser.parse(opts.end, settings={'RETURN_AS_TIMEZONE_AWARE': True}) # If both are used, make sure they're in the right order: if opts.start is not None and opts.end is not None: if opts.start > opts.end: ASF_LOGGER.warning( - f"Start date ({opts.start}) is after end date ({opts.end}). Switching the two." + f'Start date ({opts.start}) is after end date ({opts.end}). Switching the two.' ) opts.start, opts.end = opts.end, opts.start # Can't do this sooner, since you need to compare start vs end: if opts.start is not None: - opts.start = opts.start.strftime("%Y-%m-%dT%H:%M:%SZ") + opts.start = opts.start.strftime('%Y-%m-%dT%H:%M:%SZ') if opts.end is not None: - opts.end = opts.end.strftime("%Y-%m-%dT%H:%M:%SZ") + opts.end = opts.end.strftime('%Y-%m-%dT%H:%M:%SZ') def set_platform_alias(opts: ASFSearchOptions): @@ -412,26 +404,26 @@ def set_platform_alias(opts: ASFSearchOptions): if opts.platform is not None: plat_aliases = { # Groups: - "S1": ["SENTINEL-1A", "SENTINEL-1B"], - "SENTINEL-1": ["SENTINEL-1A", "SENTINEL-1B"], - "SENTINEL": ["SENTINEL-1A", "SENTINEL-1B"], - "ERS": ["ERS-1", "ERS-2"], - "SIR-C": ["STS-59", "STS-68"], + 'S1': ['SENTINEL-1A', 'SENTINEL-1B'], + 'SENTINEL-1': ['SENTINEL-1A', 'SENTINEL-1B'], + 'SENTINEL': ['SENTINEL-1A', 'SENTINEL-1B'], + 'ERS': ['ERS-1', 'ERS-2'], + 'SIR-C': ['STS-59', 'STS-68'], # Singles / Aliases: - "R1": ["RADARSAT-1"], - "E1": ["ERS-1"], - "E2": ["ERS-2"], - "J1": ["JERS-1"], - "A3": ["ALOS"], - "AS": ["DC-8"], - "AIRSAR": ["DC-8"], - "SS": ["SEASAT 1"], - "SEASAT": ["SEASAT 1"], - "SA": ["SENTINEL-1A"], - "SB": ["SENTINEL-1B"], - "SP": ["SMAP"], - "UA": ["G-III"], - "UAVSAR": ["G-III"], + 'R1': ['RADARSAT-1'], + 'E1': ['ERS-1'], + 'E2': ['ERS-2'], + 'J1': ['JERS-1'], + 'A3': ['ALOS'], + 'AS': ['DC-8'], + 'AIRSAR': ['DC-8'], + 'SS': ['SEASAT 1'], + 'SEASAT': ['SEASAT 1'], + 'SA': ['SENTINEL-1A'], + 'SB': ['SENTINEL-1B'], + 'SP': ['SMAP'], + 'UA': ['G-III'], + 'UAVSAR': ['G-III'], } platform_list = [] for plat in opts.platform: @@ -442,6 +434,8 @@ def set_platform_alias(opts: ASFSearchOptions): platform_list.append(plat) opts.platform = list(set(platform_list)) + + _dataset_collection_items = dataset_collections.items() @@ -478,21 
+472,22 @@ def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct: # If the platform exists, try to match it platform = _get_platform(item=item) if ASFProductType.ARIAS1GUNWProduct._is_subclass(item=item): - return dataset_to_product_types.get("ARIA S1 GUNW")(item, session=session) + return dataset_to_product_types.get('ARIA S1 GUNW')(item, session=session) elif (subclass := dataset_to_product_types.get(platform)) is not None: return subclass(item, session=session) output = ASFProduct(item, session=session) - granule_concept_id = output.meta.get("concept-id", "Missing Granule Concept ID") + granule_concept_id = output.meta.get('concept-id', 'Missing Granule Concept ID') fileID = output.properties.get( - "fileID", output.properties.get("sceneName", "fileID and sceneName Missing") + 'fileID', output.properties.get('sceneName', 'fileID and sceneName Missing') ) ASF_LOGGER.warning( f'Failed to find corresponding ASFProduct subclass for \ Product: "{fileID}", Granule Concept ID: "{granule_concept_id}", \ - default to "ASFProduct"') + default to "ASFProduct"' + ) return output @@ -502,13 +497,11 @@ def _get_product_type_key(item: Dict) -> str: 2. platform_shortName (Fallback) - special case: Aria S1 GUNW """ - collection_shortName = ASFProduct.umm_get( - item["umm"], "CollectionReference", "ShortName" - ) + collection_shortName = ASFProduct.umm_get(item['umm'], 'CollectionReference', 'ShortName') if collection_shortName is None: if ASFProductType.ARIAS1GUNWProduct._is_subclass(item=item): - return "ARIA S1 GUNW" + return 'ARIA S1 GUNW' platform = _get_platform(item=item) return platform @@ -517,31 +510,31 @@ def _get_product_type_key(item: Dict) -> str: def _get_platform(item: Dict): - return ASFProduct.umm_get(item["umm"], "Platforms", 0, "ShortName") + return ASFProduct.umm_get(item['umm'], 'Platforms', 0, 'ShortName') # Maps datasets from DATASET.py and collection/platform shortnames to ASFProduct subclasses dataset_to_product_types = { - "SENTINEL-1": ASFProductType.S1Product, - "OPERA-S1": ASFProductType.OPERAS1Product, - "OPERA-S1-CALVAL": ASFProductType.OPERAS1Product, - "SLC-BURST": ASFProductType.S1BurstProduct, - "ALOS": ASFProductType.ALOSProduct, - "SIR-C": ASFProductType.SIRCProduct, - "STS-59": ASFProductType.SIRCProduct, - "STS-68": ASFProductType.SIRCProduct, - "ARIA S1 GUNW": ASFProductType.ARIAS1GUNWProduct, - "SMAP": ASFProductType.SMAPProduct, - "UAVSAR": ASFProductType.UAVSARProduct, - "G-III": ASFProductType.UAVSARProduct, - "RADARSAT-1": ASFProductType.RADARSATProduct, - "ERS": ASFProductType.ERSProduct, - "ERS-1": ASFProductType.ERSProduct, - "ERS-2": ASFProductType.ERSProduct, - "JERS-1": ASFProductType.JERSProduct, - "AIRSAR": ASFProductType.AIRSARProduct, - "DC-8": ASFProductType.AIRSARProduct, - "SEASAT": ASFProductType.SEASATProduct, - "SEASAT 1": ASFProductType.SEASATProduct, - "NISAR": ASFProductType.NISARProduct, + 'SENTINEL-1': ASFProductType.S1Product, + 'OPERA-S1': ASFProductType.OPERAS1Product, + 'OPERA-S1-CALVAL': ASFProductType.OPERAS1Product, + 'SLC-BURST': ASFProductType.S1BurstProduct, + 'ALOS': ASFProductType.ALOSProduct, + 'SIR-C': ASFProductType.SIRCProduct, + 'STS-59': ASFProductType.SIRCProduct, + 'STS-68': ASFProductType.SIRCProduct, + 'ARIA S1 GUNW': ASFProductType.ARIAS1GUNWProduct, + 'SMAP': ASFProductType.SMAPProduct, + 'UAVSAR': ASFProductType.UAVSARProduct, + 'G-III': ASFProductType.UAVSARProduct, + 'RADARSAT-1': ASFProductType.RADARSATProduct, + 'ERS': ASFProductType.ERSProduct, + 'ERS-1': ASFProductType.ERSProduct, + 'ERS-2': 
ASFProductType.ERSProduct, + 'JERS-1': ASFProductType.JERSProduct, + 'AIRSAR': ASFProductType.AIRSARProduct, + 'DC-8': ASFProductType.AIRSARProduct, + 'SEASAT': ASFProductType.SEASATProduct, + 'SEASAT 1': ASFProductType.SEASATProduct, + 'NISAR': ASFProductType.NISARProduct, } diff --git a/examples/hello_world.py b/examples/hello_world.py index e499a566..0f301d6b 100644 --- a/examples/hello_world.py +++ b/examples/hello_world.py @@ -5,83 +5,86 @@ import json import asf_search as asf -print('='*80) +print('=' * 80) print('Constants') print(f'asf.BEAMMODE.IW: {asf.BEAMMODE.IW}') print(f'asf.POLARIZATION.HH_HV: {asf.POLARIZATION.HH_HV}') print(f'asf.PLATFORM.SENTINEL1: {asf.PLATFORM.SENTINEL1}') -print('='*80) +print('=' * 80) print(f'Health check: {json.dumps(asf.health(), indent=2)}') -print('='*80) +print('=' * 80) results = asf.search(platform=[asf.PLATFORM.SENTINEL1], maxResults=2) print(f'Basic search example: {results}') -print('='*80) +print('=' * 80) results = asf.granule_search(['ALPSRS279162400', 'ALPSRS279162200']) print(f'Granule search example: {results}') -print('='*80) +print('=' * 80) results = asf.product_search(['ALAV2A279102730', 'ALAV2A279133150']) print(f'Product search example: {results}') -print('='*80) +print('=' * 80) wkt = 'POLYGON((-135.7 58.2,-136.6 58.1,-135.8 56.9,-134.6 56.1,-134.9 58.0,-135.7 58.2))' results = asf.geo_search(platform=[asf.PLATFORM.SENTINEL1], intersectsWith=wkt, maxResults=2) print(f'Geographic search example: {results}') -print('='*80) +print('=' * 80) results = asf.search( platform=[asf.PLATFORM.SENTINEL1], frame=[100, 150, (200, 205)], relativeOrbit=[100, 105, (110, 115)], - processingLevel=[asf.PRODUCT_TYPE.SLC]) + processingLevel=[asf.PRODUCT_TYPE.SLC], +) print(f'Path/frame/platform/product type example: {results}') -print('='*80) -results = asf.stack_from_id('S1B_WV_SLC__1SSV_20210126T234925_20210126T235632_025332_030462_C733-SLC') +print('=' * 80) +results = asf.stack_from_id( + 'S1B_WV_SLC__1SSV_20210126T234925_20210126T235632_025332_030462_C733-SLC' +) print(f'Baseline stack search example, ephemeris-based: {results}') -print('='*80) +print('=' * 80) try: results = asf.stack_from_id('nonexistent-scene') except asf.ASFSearchError as e: print(f'Stacking a non-existent scene throws an exception: {e}') -print('='*80) +print('=' * 80) try: results = asf.stack_from_id('UA_atchaf_06309_21024_020_210401_L090_CX_01-PROJECTED') except asf.ASFBaselineError as e: print(f'Not everything can be stacked: {e}') -print('='*80) +print('=' * 80) results = asf.stack_from_id('ALPSRP279071390-RTC_HI_RES') print(f'Baseline stack search example, pre-calculated: {results}') -print('='*80) +print('=' * 80) results = results[0].stack() print(f'Baseline stacks can also be made from an ASFProduct: {results}') -print('='*80) +print('=' * 80) print(f'ASFSearchResults work like lists: {results[3:5]}') -print('='*80) +print('=' * 80) print(f'ASFSearchResults serializes to geojson: {results[3:5]}') -print('='*80) +print('=' * 80) product = results[2] print(f'ASFProduct serializes to geojson: {product}') -print('='*80) -wkt = 'POLYGON((-160 65,-150 65,-160 60,-150 60,-160 65))' # Self-intersecting bowtie +print('=' * 80) +wkt = 'POLYGON((-160 65,-150 65,-160 60,-150 60,-160 65))' # Self-intersecting bowtie try: results = asf.geo_search(platform=[asf.PLATFORM.SENTINEL1], intersectsWith=wkt) except asf.ASFWKTError as e: print(f'Exception example: {e}') -print('='*80) +print('=' * 80) print('A few more exception examples:') try: asf.search(offNadirAngle=[tuple([1])]) @@ 
-100,11 +103,11 @@ except ValueError as e: print(f'Tuple non-numeric max: {e}') try: - asf.search(offNadirAngle=[(float("NaN"), 2)]) + asf.search(offNadirAngle=[(float('NaN'), 2)]) except ValueError as e: print(f'Tuple non-finite min: {e}') try: - asf.search(offNadirAngle=[1, (float("Inf"))]) + asf.search(offNadirAngle=[1, (float('Inf'))]) except ValueError as e: print(f'Tuple non-finite max: {e}') try: @@ -112,7 +115,7 @@ except ValueError as e: print(f'Tuple min > max: {e}') try: - asf.search(offNadirAngle=[float("Inf")]) + asf.search(offNadirAngle=[float('Inf')]) except ValueError as e: print(f'Bare value non-finite: {e}') try: diff --git a/pyproject.toml b/pyproject.toml index 6e60fdae..e3c391bf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,3 +10,11 @@ build-backend = "setuptools.build_meta" # "UserWarning: Unknown distribution option: 'use_scm_version'" # if setuptools_scm isn't installed when setup.py is called: [tool.setuptools_scm] + +[tool.ruff] +line-length = 100 +fix = true + +[tool.ruff.format] +# Prefer single quotes over double quotes. +quote-style = "single" diff --git a/setup.py b/setup.py index dff08b72..d5a82a73 100644 --- a/setup.py +++ b/setup.py @@ -1,75 +1,74 @@ """asf_search setuptools configuration""" + from setuptools import find_packages, setup requirements = [ - "requests", - "shapely", - "pytz", - "importlib_metadata", - "numpy", - "dateparser", - "python-dateutil", - "tenacity == 8.2.2", + 'requests', + 'shapely', + 'pytz', + 'importlib_metadata', + 'numpy', + 'dateparser', + 'python-dateutil', + 'tenacity == 8.2.2', ] test_requirements = [ - "pytest==8.1.1", - "pytest-automation==3.0.0", - "pytest-cov", - "pytest-xdist", - "coverage", - "requests-mock", - "nbformat", - "nbconvert", - "ipykernel", + 'pytest==8.1.1', + 'pytest-automation==3.0.0', + 'pytest-cov', + 'pytest-xdist', + 'coverage', + 'requests-mock', + 'nbformat', + 'nbconvert', + 'ipykernel', ] extra_requirements = [ - "remotezip>=0.10.0", - "ciso8601", + 'remotezip>=0.10.0', + 'ciso8601', ] -with open("README.md", "r") as readme_file: +with open('README.md', 'r') as readme_file: readme = readme_file.read() setup( - name="asf_search", + name='asf_search', # version=Declared in pyproject.toml, through "[tool.setuptools_scm]" - author="Alaska Satellite Facility Discovery Team", - author_email="uaf-asf-discovery@alaska.edu", + author='Alaska Satellite Facility Discovery Team', + author_email='uaf-asf-discovery@alaska.edu', description="Python wrapper for ASF's SearchAPI", long_description=readme, - long_description_content_type="text/markdown", - url="https://github.com/asfadmin/Discovery-asf_search.git", - project_urls={ - 'Documentation': 'https://docs.asf.alaska.edu/asf_search/basics/' - }, - packages=find_packages(exclude=["tests.*", "tests", "examples.*", "examples"]), + long_description_content_type='text/markdown', + url='https://github.com/asfadmin/Discovery-asf_search.git', + project_urls={'Documentation': 'https://docs.asf.alaska.edu/asf_search/basics/'}, + packages=find_packages(exclude=['tests.*', 'tests', 'examples.*', 'examples']), package_dir={'asf_search': 'asf_search'}, include_package_data=True, python_requires='>=3.8', install_requires=requirements, - extras_require={ "test": test_requirements, "extras": extra_requirements}, + extras_require={'test': test_requirements, 'extras': extra_requirements}, license='BSD', license_files=('LICENSE',), classifiers=[ - "Development Status :: 5 - Production/Stable", - "License :: OSI Approved :: BSD License", - "Operating System :: OS 
Independent", - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Topic :: Software Development", - "Topic :: Scientific/Engineering :: Atmospheric Science", - "Topic :: Scientific/Engineering :: GIS", - "Topic :: Scientific/Engineering :: Hydrology", - "Topic :: Utilities" + 'Development Status :: 5 - Production/Stable', + 'License :: OSI Approved :: BSD License', + 'Operating System :: OS Independent', + 'Intended Audience :: Developers', + 'Intended Audience :: Science/Research', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3 :: Only', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', + 'Topic :: Software Development', + 'Topic :: Scientific/Engineering :: Atmospheric Science', + 'Topic :: Scientific/Engineering :: GIS', + 'Topic :: Scientific/Engineering :: Hydrology', + 'Topic :: Utilities', ], ) diff --git a/tests/ASFProduct/test_ASFProduct.py b/tests/ASFProduct/test_ASFProduct.py index 42214a2f..13922f37 100644 --- a/tests/ASFProduct/test_ASFProduct.py +++ b/tests/ASFProduct/test_ASFProduct.py @@ -2,7 +2,13 @@ import pytest import unittest -from asf_search import ASFProduct, ASFSearchResults, ASFSearchOptions, ASFSession, FileDownloadType +from asf_search import ( + ASFProduct, + ASFSearchResults, + ASFSearchOptions, + ASFSession, + FileDownloadType, +) from unittest.mock import patch from shapely.geometry import shape from shapely.ops import orient @@ -11,6 +17,7 @@ from asf_search.search.search_generator import as_ASFProduct + def run_test_ASFProduct(product_json): if product_json is None: product = ASFProduct() @@ -25,21 +32,22 @@ def run_test_ASFProduct(product_json): product = as_ASFProduct(product_json, ASFSession()) geojson = product.geojson() - + if geojson['geometry']['coordinates'] is not None: expected_shape = orient(shape(product_json['geometry'])) - output_shape = orient(shape(geojson['geometry'])) - assert(output_shape.equals(expected_shape)) + output_shape = orient(shape(geojson['geometry'])) + assert output_shape.equals(expected_shape) elif product.meta != {}: assert product.properties == product_json['properties'] assert product.geometry == product_json['geometry'] - assert(product.umm == product_json["umm"]) - assert(product.meta == product_json["meta"]) + assert product.umm == product_json['umm'] + assert product.meta == product_json['meta'] + def run_test_stack(reference, pre_processed_stack, processed_stack): product = as_ASFProduct(reference, ASFSession()) - + with patch('asf_search.baseline_search.search') as search_mock: temp = ASFSearchResults([as_ASFProduct(prod, ASFSession()) for prod in pre_processed_stack]) for idx, prod in enumerate(temp): @@ -47,18 +55,29 @@ def run_test_stack(reference, pre_processed_stack, processed_stack): search_mock.return_value = temp stack = product.stack() - stack = [ - product for product in stack if product.properties['temporalBaseline'] != None and product.properties['perpendicularBaseline'] != None - ] + product + for product in stack + if product.properties['temporalBaseline'] != None + and 
product.properties['perpendicularBaseline'] != None + ] + + for idx, secondary in enumerate(stack): + if idx > 0: + assert ( + secondary.properties['temporalBaseline'] + >= stack[idx - 1].properties['temporalBaseline'] + ) + + assert ( + secondary.properties['temporalBaseline'] + == processed_stack[idx]['properties']['temporalBaseline'] + ) + assert ( + secondary.properties['perpendicularBaseline'] + == processed_stack[idx]['properties']['perpendicularBaseline'] + ) - for(idx, secondary) in enumerate(stack): - - if(idx > 0): - assert(secondary.properties['temporalBaseline'] >= stack[idx - 1].properties['temporalBaseline']) - - assert(secondary.properties['temporalBaseline'] == processed_stack[idx]['properties']['temporalBaseline']) - assert(secondary.properties['perpendicularBaseline'] == processed_stack[idx]['properties']['perpendicularBaseline']) def run_test_product_get_stack_options(reference, options): product = as_ASFProduct(reference, ASFSession()) @@ -67,6 +86,7 @@ def run_test_product_get_stack_options(reference, options): product_options = dict(product.get_stack_opts()) assert product_options == dict(expected_options) + def run_test_ASFProduct_download(reference, filename, filetype, additional_urls): product = as_ASFProduct(reference, ASFSession()) product.properties['additionalUrls'] = additional_urls @@ -75,13 +95,13 @@ def run_test_ASFProduct_download(reference, filename, filetype, additional_urls) resp.status_code = 200 mock_get.return_value = resp resp.iter_content = lambda chunk_size: [] - - with patch('builtins.open', unittest.mock.mock_open()) as m: + + with patch('builtins.open', unittest.mock.mock_open()) as m: if filename != None and ( - (filetype == FileDownloadType.ADDITIONAL_FILES and len(additional_urls) > 1) + (filetype == FileDownloadType.ADDITIONAL_FILES and len(additional_urls) > 1) or (filetype == FileDownloadType.ALL_FILES and len(additional_urls) > 0) ): with pytest.warns(Warning): product.download('./', filename=filename, fileType=filetype) else: - product.download('./', filename=filename, fileType=filetype) \ No newline at end of file + product.download('./', filename=filename, fileType=filetype) diff --git a/tests/ASFSearchOptions/test_ASFSearchOptions.py b/tests/ASFSearchOptions/test_ASFSearchOptions.py index 2c41d077..0ac863bb 100644 --- a/tests/ASFSearchOptions/test_ASFSearchOptions.py +++ b/tests/ASFSearchOptions/test_ASFSearchOptions.py @@ -4,22 +4,27 @@ from asf_search.ASFSearchOptions.validator_map import validate, validator_map from pytest import raises + def run_test_validator_map_validate(key, value, output): if key not in list(validator_map.keys()): - with raises(KeyError) as keyerror: validate(key, value) - if key in [validator_key.lower() for validator_key in list(validator_map.keys()) if key not in config.keys()]: - assert "Did you mean" in str(keyerror.value) + if key in [ + validator_key.lower() + for validator_key in list(validator_map.keys()) + if key not in config.keys() + ]: + assert 'Did you mean' in str(keyerror.value) return assert validate(key, value) == output + def run_test_ASFSearchOptions_validator(validator_name, param, output, error): validator = getattr(validators, validator_name) - + if error == None: assert output == validator(param) else: @@ -27,36 +32,49 @@ def run_test_ASFSearchOptions_validator(validator_name, param, output, error): validator(param) assert error in str(e.value) + def run_test_ASFSearchOptions(**kwargs): - test_info = copy.copy(kwargs["test_info"]) - exception = test_info["exception"] # Can be "None" 
for don't. - if "expect_output" in test_info: - expect_output = test_info.pop("expect_output") + test_info = copy.copy(kwargs['test_info']) + exception = test_info['exception'] # Can be "None" for don't. + if 'expect_output' in test_info: + expect_output = test_info.pop('expect_output') else: expect_output = {} # Take out anything that isn't supposed to reach the options object: - del test_info["title"] - del test_info["exception"] + del test_info['title'] + del test_info['exception'] try: options_obj = ASFSearchOptions(**test_info) except (KeyError, ValueError) as e: - assert type(e).__name__ == exception, f"ERROR: Didn't expect exception {type(e).__name__} to occur." + assert ( + type(e).__name__ == exception + ), f"ERROR: Didn't expect exception {type(e).__name__} to occur." return else: - assert exception == None, f"ERROR: Expected exception {exception}, but SearchOptions never threw." + assert ( + exception == None + ), f'ERROR: Expected exception {exception}, but SearchOptions never threw.' for key, val in expect_output.items(): - assert getattr(options_obj, key) == val, f"ERROR: options object param '{key}' should have value '{val}'. Got '{getattr(options_obj, key)}'." + assert ( + getattr(options_obj, key) == val + ), f"ERROR: options object param '{key}' should have value '{val}'. Got '{getattr(options_obj, key)}'." # test ASFSearchOptions.reset_search() options_obj.reset_search() - - assert len([val for key, val in dict(options_obj).items() if key not in config.keys()]) == 0, "ERROR: ASFSearchOptions.reset() did not clear all non-default searchable params" - + + assert ( + len([val for key, val in dict(options_obj).items() if key not in config.keys()]) == 0 + ), 'ERROR: ASFSearchOptions.reset() did not clear all non-default searchable params' + for key, value in config.items(): if test_info.get(key) is not None: - assert getattr(options_obj, key) == test_info[key], f"ERROR: User defined value '{test_info[key]}' for default param '{key}', but value was lost after ASFSearchOptions.reset()" + assert ( + getattr(options_obj, key) == test_info[key] + ), f"ERROR: User defined value '{test_info[key]}' for default param '{key}', but value was lost after ASFSearchOptions.reset()" else: - assert getattr(options_obj, key) == value, f"ERROR: default param '{key}' left default by user changed, should have value '{val}'. Got '{getattr(options_obj, key)}'." + assert ( + getattr(options_obj, key) == value + ), f"ERROR: default param '{key}' left default by user changed, should have value '{val}'. Got '{getattr(options_obj, key)}'." diff --git a/tests/ASFSearchResults/test_ASFSearchResults.py b/tests/ASFSearchResults/test_ASFSearchResults.py index 397bc051..c28dcac8 100644 --- a/tests/ASFSearchResults/test_ASFSearchResults.py +++ b/tests/ASFSearchResults/test_ASFSearchResults.py @@ -11,7 +11,6 @@ from shapely.geometry import Polygon from shapely.wkt import loads -from shapely.ops import transform from shapely.geometry import shape from shapely.geometry.base import BaseGeometry from asf_search.CMR.translate import try_parse_date @@ -23,14 +22,15 @@ # when this replaces SearchAPI change values to cached API_URL = 'https://api.daac.asf.alaska.edu/services/search/param?' 
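# --- Editor's note: an illustrative sketch, not part of this patch. ---
# The round-trip tests below pull reference output from the live SearchAPI
# (API_URL above) and compare it against asf_search's own streaming
# exporters, along these lines:
#
#     product_list_str = ','.join(p.properties['fileID'] for p in results)
#     expected = get_SearchAPI_Output(product_list_str, 'csv')
#     actual = ''.join(results.csv())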
+
 def run_test_output_format(results: ASFSearchResults):
-    #search results are always sorted this way when returned from asf_search.search(),
+    # search results are always sorted this way when returned from asf_search.search(),
     # but not all test case resources are
     results.sort(key=lambda p: (p.properties['stopTime'], p.properties['fileID']), reverse=True)

     product_list_str = ','.join([product.properties['fileID'] for product in results])
     results.searchComplete = True
-
+
     for output_type in ['csv', 'kml', 'metalink', 'jsonlite', 'jsonlite2', 'geojson']:
         expected = get_SearchAPI_Output(product_list_str, output_type)
         if output_type == 'csv':
@@ -44,46 +44,48 @@
         elif output_type == 'geojson':
             check_geojson(results)

+
 def check_metalink(results: ASFSearchResults, expected_str: str):
     actual = ''.join([line for line in results.metalink()])
-
+
     actual_tree = DefusedETree.fromstring(actual)
     expected_tree = DefusedETree.fromstring(expected_str)
-
+
     canon_actual = ETree.canonicalize(DefusedETree.tostring(actual_tree), strip_text=True)
     canon_expected = ETree.canonicalize(DefusedETree.tostring(expected_tree), strip_text=True)
-
+
     assert canon_actual == canon_expected

+
 def check_kml(results: ASFSearchResults, expected_str: str):
     namespaces = {'kml': 'http://www.opengis.net/kml/2.2'}

-    placemarks_path = ".//kml:Placemark"
+    placemarks_path = './/kml:Placemark'
     expected_root = DefusedETree.fromstring(expected_str)
     expected_placemarks = expected_root.findall(placemarks_path, namespaces)

     actual_root = DefusedETree.fromstring(''.join([block for block in results.kml()]))
     actual_placemarks = actual_root.findall(placemarks_path, namespaces)
-
+
     # Check polygons for equivalence (asf-search starts from a different pivot)
     # and remove them from the kml so we can easily compare the rest of the placemark data
     for expected_placemark, actual_placemark in zip(expected_placemarks, actual_placemarks):
         expected_polygon = expected_placemark.findall('./*')[-1]
         actual_polygon = actual_placemark.findall('./*')[-1]
-
+
         expected_coords = get_coordinates_from_kml(DefusedETree.tostring(expected_polygon))
         actual_coords = get_coordinates_from_kml(DefusedETree.tostring(actual_polygon))
-
+
         assert Polygon(expected_coords).equals(Polygon(actual_coords))
-
+
         expected_placemark.remove(expected_polygon)
         actual_placemark.remove(actual_polygon)
-
+
     # Get canonicalize xml strings so minor differences are normalized
-    actual_canon = ETree.canonicalize( DefusedETree.tostring(actual_root), strip_text=True)
-    expected_canon = ETree.canonicalize( DefusedETree.tostring(expected_root), strip_text=True)
-
-    date_pattern = r"\>(?P<date_name>[\w ]*time|Time): *(?P<date_value>[^\<]*)\<"
-
+    actual_canon = ETree.canonicalize(DefusedETree.tostring(actual_root), strip_text=True)
+    expected_canon = ETree.canonicalize(DefusedETree.tostring(expected_root), strip_text=True)
+
+    date_pattern = r'\>(?P<date_name>[\w ]*time|Time): *(?P<date_value>[^\<]*)\<'
+
     actual_dates = re.findall(date_pattern, actual_canon, re.MULTILINE)
     expected_date = re.findall(date_pattern, expected_canon, re.MULTILINE)

@@ -91,7 +93,7 @@
         date_str, date_value = match
         assert expected_date[idx][0] == date_str
         assert try_parse_date(expected_date[idx][1]) == try_parse_date(date_value)
-
+
     actual_canon = re.sub(date_pattern, '', actual_canon)
     expected_canon = re.sub(date_pattern, '', expected_canon)
     assert actual_canon == expected_canon
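# --- Editor's note: an illustrative aside, not part of this patch. ---
# check_kml() above normalizes timestamps through try_parse_date() before
# comparing them, since SearchAPI and asf_search may serialize the same
# instant differently (for example with or without fractional seconds).
# The pattern is simply (expected_value/actual_value are placeholders):
#
#     assert try_parse_date(expected_value) == try_parse_date(actual_value)

@@ -101,9 +103,9 @@ def get_coordinates_from_kml(data: str):
     namespaces = {'kml': 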
'http://www.opengis.net/kml/2.2'} coords = [] - coords_lon_lat_path = ".//kml:outerBoundaryIs/kml:LinearRing/kml:coordinates" + coords_lon_lat_path = './/kml:outerBoundaryIs/kml:LinearRing/kml:coordinates' root = DefusedETree.fromstring(data) - + coordinates_elements = root.findall(coords_lon_lat_path, namespaces) for lon_lat_z in coordinates_elements[0].text.split('\n'): if len(lon_lat_z.split(',')) == 3: @@ -111,19 +113,19 @@ def get_coordinates_from_kml(data: str): coords.append([float(lon), float(lat)]) return coords - + def check_csv(results: ASFSearchResults, expected_str: str): expected = [product for product in csv.reader(expected_str.split('\n')) if product != []] # actual = [prod for prod in csv.reader(''.join([s for s in results.csv()]).split('\n')) if prod != []] - + expected = csv.DictReader(expected_str.split('\n')) actual = csv.DictReader([s for s in results.csv()]) - + for actual_row, expected_row in zip(actual, expected): actual_dict = dict(actual_row) expected_dict = dict(expected_row) - + for key in expected_dict.keys(): if expected_dict[key] in ['None', None, '']: assert actual_dict[key] in ['None', None, ''] @@ -131,26 +133,27 @@ def check_csv(results: ASFSearchResults, expected_str: str): try: expected_value = float(expected_dict[key]) actual_value = float(actual_dict[key]) - assert expected_value == actual_value, \ - f"expected '{expected_dict[key]}' for key '{key}', got '{actual_dict[key]}'" + assert ( + expected_value == actual_value + ), f"expected '{expected_dict[key]}' for key '{key}', got '{actual_dict[key]}'" except ValueError: try: expected_date = try_parse_date(expected_dict[key]) actual_date = try_parse_date(actual_dict[key]) - assert expected_date == actual_date, \ - f"Expected date '{expected_date}' for key '{key}', got '{actual_date}'" + assert ( + expected_date == actual_date + ), f"Expected date '{expected_date}' for key '{key}', got '{actual_date}'" except ValueError: - assert expected_dict[key] == actual_dict[key], \ - f"expected '{expected_dict[key]}' for key '{key}', got '{actual_dict[key]}'" - - + assert ( + expected_dict[key] == actual_dict[key] + ), f"expected '{expected_dict[key]}' for key '{key}', got '{actual_dict[key]}'" + + def check_jsonLite(results: ASFSearchResults, expected_str: str, output_type: str): jsonlite2 = output_type == 'jsonlite2' - + expected = json.loads(expected_str)['results'] - - if jsonlite2: wkt_key = 'w' wkt_unwrapped_key = 'wu' @@ -162,62 +165,71 @@ def check_jsonLite(results: ASFSearchResults, expected_str: str, output_type: st start_time_key = 'startTime' stop_time_key = 'stopTime' - - actual = json.loads(''.join(results.jsonlite2() if jsonlite2 else results.jsonlite()))['results'] + actual = json.loads(''.join(results.jsonlite2() if jsonlite2 else results.jsonlite()))[ + 'results' + ] for idx, expected_product in enumerate(expected): wkt = expected_product.pop(wkt_key) wkt_unwrapped = expected_product.pop(wkt_unwrapped_key) - + startTime = expected_product.pop(start_time_key) stopTime = expected_product.pop(stop_time_key) for key in expected_product.keys(): assert actual[idx][key] == expected_product[key] - + assert WKT.loads(actual[idx][wkt_key]).equals(WKT.loads(wkt)) assert WKT.loads(actual[idx][wkt_unwrapped_key]).equals(WKT.loads(wkt_unwrapped)) assert actual[idx][start_time_key] == try_parse_date(startTime) assert actual[idx][stop_time_key] == try_parse_date(stopTime) + def check_geojson(results: ASFSearchResults): expected = results.geojson() actual = asf.export.results_to_geojson(results) - + assert 
json.loads(''.join(actual)) == expected - + + def get_SearchAPI_Output(product_list: List[str], output_type: str) -> List[Dict]: response = requests.get(API_URL, [('product_list', product_list), ('output', output_type)]) response.raise_for_status() - + expected = response.text - + return expected + def run_test_ASFSearchResults_intersection(wkt: str): wrapped, unwrapped, _ = asf.validate_wkt(wkt) - unchanged_aoi = loads(wkt) # sometimes geometries don't come back with wrapping in mind + unchanged_aoi = loads(wkt) # sometimes geometries don't come back with wrapping in mind # exclude SMAP products - platforms = [ - PLATFORM.SENTINEL1, - PLATFORM.UAVSAR - ] - + platforms = [PLATFORM.SENTINEL1, PLATFORM.UAVSAR] + def overlap_check(s1: BaseGeometry, s2: BaseGeometry): return s1.overlaps(s2) or s1.touches(s2) or s2.distance(s1) <= 0.005 + asf.constants.INTERNAL.CMR_TIMEOUT = 60 for platform in platforms: try: results = asf.geo_search(intersectsWith=wkt, platform=platform, maxResults=250) except ASFSearchError as exc: asf.constants.INTERNAL.CMR_TIMEOUT = 30 - raise BaseException(f'Failed to perform intersection test with wkt: {wkt}\nplatform: {platform}.\nOriginal exception: {exc}') - + raise BaseException( + f'Failed to perform intersection test with wkt: {wkt}\nplatform: {platform}.\nOriginal exception: {exc}' + ) + asf.constants.INTERNAL.CMR_TIMEOUT = 30 for product in results: if shape(product.geometry).is_valid: - product_geom_wrapped, product_geom_unwrapped, _ = asf.validate_wkt(shape(product.geometry)) + product_geom_wrapped, product_geom_unwrapped, _ = asf.validate_wkt( + shape(product.geometry) + ) original_shape = unchanged_aoi - assert overlap_check(product_geom_wrapped, wrapped) or overlap_check(product_geom_wrapped, original_shape), f"OVERLAP FAIL: {product.properties['sceneName']}, {product.geometry} \nproduct: {product_geom_wrapped.wkt} \naoi: {wrapped.wkt}" + assert ( + overlap_check(product_geom_wrapped, wrapped) + or overlap_check(product_geom_wrapped, original_shape) + ), f"OVERLAP FAIL: {product.properties['sceneName']}, {product.geometry} \nproduct: {product_geom_wrapped.wkt} \naoi: {wrapped.wkt}" diff --git a/tests/ASFSession/test_ASFSession.py b/tests/ASFSession/test_ASFSession.py index 4c560281..f1cee773 100644 --- a/tests/ASFSession/test_ASFSession.py +++ b/tests/ASFSession/test_ASFSession.py @@ -9,21 +9,24 @@ from unittest.mock import patch + def run_auth_with_creds(username: str, password: str): session = ASFSession() session.auth_with_creds(username=username, password=password) + def run_auth_with_token(token: str): session = ASFSession() with patch('asf_search.ASFSession.post') as mock_token_session: if not token.startswith('Bearer EDL'): - mock_token_session.return_value.status_code = 400 - session.auth_with_token(token) + mock_token_session.return_value.status_code = 400 + session.auth_with_token(token) mock_token_session.return_value.status_code = 200 session.auth_with_token(token) + def run_auth_with_cookiejar(cookies: List): cookiejar = http.cookiejar.CookieJar() for cookie in cookies: @@ -36,13 +39,13 @@ def run_auth_with_cookiejar(cookies: List): request_cookiejar_session = ASFSession() request_cookiejar_session.auth_with_cookiejar(session.cookies) -def run_test_asf_session_rebuild_auth( - original_domain: str, - response_domain: str, - response_code: numbers.Number, - final_token - ): +def run_test_asf_session_rebuild_auth( + original_domain: str, + response_domain: str, + response_code: numbers.Number, + final_token, +): if final_token == 'None': final_token 
= None @@ -50,8 +53,8 @@ def run_test_asf_session_rebuild_auth( with patch('asf_search.ASFSession.post') as mock_token_session: mock_token_session.return_value.status_code = 200 - session.auth_with_token("bad_token") - + session.auth_with_token('bad_token') + req = requests.Request(original_domain) req.headers.update({'Authorization': 'Bearer fakeToken'}) @@ -64,10 +67,11 @@ def run_test_asf_session_rebuild_auth( with patch('asf_search.ASFSession._get_domain') as hostname_patch: hostname_patch.side_effect = [original_domain, response_domain] - + session.rebuild_auth(req, response) - assert req.headers.get("Authorization") == final_token + assert req.headers.get('Authorization') == final_token + def test_ASFSession_INTERNAL_mangling(): session = asf_search.ASFSession() @@ -78,7 +82,7 @@ def test_ASFSession_INTERNAL_mangling(): session.asf_auth_host = asf_search.constants.INTERNAL.CMR_COLLECTIONS session.cmr_collections = asf_search.constants.INTERNAL.AUTH_DOMAINS session.edl_client_id = asf_search.constants.INTERNAL.AUTH_COOKIES - + # get the current defaults since we're going to mangle them DEFAULT_EDL_HOST = asf_search.constants.INTERNAL.EDL_HOST DEFAULT_EDL_CLIENT_ID = asf_search.constants.INTERNAL.EDL_CLIENT_ID @@ -95,7 +99,7 @@ def test_ASFSession_INTERNAL_mangling(): auth_domains = ['custom_auth_domain'] uat_login_cookie = ['uat_urs_user_already_logged'] uat_login_domain = 'uat.urs.earthdata.nasa.gov' - + asf_search.constants.INTERNAL.CMR_HOST = uat_domain asf_search.constants.INTERNAL.EDL_HOST = uat_login_domain asf_search.constants.INTERNAL.AUTH_COOKIES = uat_login_cookie @@ -103,7 +107,7 @@ def test_ASFSession_INTERNAL_mangling(): asf_search.constants.INTERNAL.AUTH_DOMAINS = auth_domains asf_search.constants.INTERNAL.ASF_AUTH_HOST = auth_host asf_search.constants.INTERNAL.CMR_COLLECTIONS = cmr_collection - + mangeled_session = asf_search.ASFSession() # set them back @@ -124,13 +128,13 @@ def test_ASFSession_INTERNAL_mangling(): assert mangeled_session.edl_client_id == edl_client_id custom_session = asf_search.ASFSession( - cmr_host = uat_domain, - edl_host = uat_login_domain, - auth_cookie_names = uat_login_cookie, - auth_domains = auth_domains, - asf_auth_host = auth_host, - cmr_collections = cmr_collection, - edl_client_id = edl_client_id + cmr_host=uat_domain, + edl_host=uat_login_domain, + auth_cookie_names=uat_login_cookie, + auth_domains=auth_domains, + asf_auth_host=auth_host, + cmr_collections=cmr_collection, + edl_client_id=edl_client_id, ) assert custom_session.cmr_host == uat_domain @@ -141,6 +145,7 @@ def test_ASFSession_INTERNAL_mangling(): assert custom_session.cmr_collections == cmr_collection assert custom_session.edl_client_id == edl_client_id + def test_ASFSession_pooling(): uat_domain = 'cmr.uat.earthdata.nasa.gov' edl_client_id = 'custom_client_id' @@ -151,13 +156,13 @@ def test_ASFSession_pooling(): uat_login_domain = 'uat.urs.earthdata.nasa.gov' custom_session = asf_search.ASFSession( - cmr_host = uat_domain, - edl_host = uat_login_domain, - auth_cookie_names = uat_login_cookie, - auth_domains = auth_domains, - asf_auth_host = auth_host, - cmr_collections = cmr_collection, - edl_client_id = edl_client_id + cmr_host=uat_domain, + edl_host=uat_login_domain, + auth_cookie_names=uat_login_cookie, + auth_domains=auth_domains, + asf_auth_host=auth_host, + cmr_collections=cmr_collection, + edl_client_id=edl_client_id, ) Pool() pool = Pool(processes=2) @@ -167,17 +172,17 @@ def test_ASFSession_pooling(): def _assert_pooled_instance_variables(session): - uat_domain = 
'cmr.uat.earthdata.nasa.gov' - edl_client_id = 'custom_client_id' - auth_host = 'custom_auth_host' - cmr_collection = '/search/granules' - auth_domains = ['custom_auth_domain'] - uat_login_cookie = ['uat_urs_user_already_logged'] - uat_login_domain = 'uat.urs.earthdata.nasa.gov' - assert session.cmr_host == uat_domain - assert session.edl_host == uat_login_domain - assert session.auth_cookie_names == uat_login_cookie - assert session.auth_domains == auth_domains - assert session.asf_auth_host == auth_host - assert session.cmr_collections == cmr_collection - assert session.edl_client_id == edl_client_id \ No newline at end of file + uat_domain = 'cmr.uat.earthdata.nasa.gov' + edl_client_id = 'custom_client_id' + auth_host = 'custom_auth_host' + cmr_collection = '/search/granules' + auth_domains = ['custom_auth_domain'] + uat_login_cookie = ['uat_urs_user_already_logged'] + uat_login_domain = 'uat.urs.earthdata.nasa.gov' + assert session.cmr_host == uat_domain + assert session.edl_host == uat_login_domain + assert session.auth_cookie_names == uat_login_cookie + assert session.auth_domains == auth_domains + assert session.asf_auth_host == auth_host + assert session.cmr_collections == cmr_collection + assert session.edl_client_id == edl_client_id diff --git a/tests/BaselineSearch/Stack/test_stack.py b/tests/BaselineSearch/Stack/test_stack.py index b707d8e0..985c31e4 100644 --- a/tests/BaselineSearch/Stack/test_stack.py +++ b/tests/BaselineSearch/Stack/test_stack.py @@ -5,22 +5,29 @@ from asf_search.search.search_generator import as_ASFProduct import pytest + + def run_test_find_new_reference(stack: List, output_index: Number) -> None: """ Test asf_search.baseline.stack.find_new_reference """ if stack == []: - assert(find_new_reference(stack) == None) + assert find_new_reference(stack) == None else: products = [as_ASFProduct(product, ASFSession()) for product in stack] for idx, product in enumerate(products): product = clear_baseline(stack[idx], product) - assert find_new_reference(products).properties['sceneName'] == stack[output_index]['properties']['sceneName'] + assert ( + find_new_reference(products).properties['sceneName'] + == stack[output_index]['properties']['sceneName'] + ) + def run_test_get_default_product_type(product: ASFStackableProduct, product_type: str) -> None: assert product.get_default_baseline_product_type() == product_type + def run_test_get_baseline_from_stack(reference, stack, output_stack, error): reference = as_ASFProduct(reference, ASFSession()) stack = ASFSearchResults([as_ASFProduct(product, ASFSession()) for product in stack]) @@ -54,10 +61,11 @@ def run_test_valid_state_vectors(reference, output): assert output == product.is_valid_reference() return + def clear_baseline(resource, product: ASFProduct): -# Baseline values can be restored from UMM in asfProduct constructor, -# this erases them again if the resource omitted them from the product - if (stateVectors:=resource['baseline'].get('stateVectors')): + # Baseline values can be restored from UMM in asfProduct constructor, + # this erases them again if the resource omitted them from the product + if stateVectors := resource['baseline'].get('stateVectors'): if stateVectors.get('positions') == {}: product.baseline = {'stateVectors': {'positions': {}, 'velocities': {}}} diff --git a/tests/BaselineSearch/test_baseline_search.py b/tests/BaselineSearch/test_baseline_search.py index 8a71b0a1..ba376cbd 100644 --- a/tests/BaselineSearch/test_baseline_search.py +++ b/tests/BaselineSearch/test_baseline_search.py @@ -9,39 
+9,46 @@ from asf_search.search.search_generator import as_ASFProduct + def run_test_get_preprocessed_stack_params(product): reference = as_ASFProduct(product, ASFSession()) params = reference.get_stack_opts() original_properties = product['properties'] - - assert(params.processingLevel == [reference.get_default_baseline_product_type()]) - assert(params.insarStackId == original_properties['insarStackId']) - assert(len(dict(params)) == 2) - + + assert params.processingLevel == [reference.get_default_baseline_product_type()] + assert params.insarStackId == original_properties['insarStackId'] + assert len(dict(params)) == 2 + def run_test_get_unprocessed_stack_params(product): reference = as_ASFProduct(product, ASFSession()) params = reference.get_stack_opts() original_properties = product['properties'] - assert(original_properties['polarization'] in params.polarization) - + assert original_properties['polarization'] in params.polarization + if reference.properties['processingLevel'] == 'BURST': - assert([reference.properties['polarization']] == params.polarization) - assert([reference.properties['burst']['fullBurstID']] == params.fullBurstID) + assert [reference.properties['polarization']] == params.polarization + assert [reference.properties['burst']['fullBurstID']] == params.fullBurstID else: - assert(['VV', 'VV+VH'] == params.polarization if reference.properties['polarization'] in ['VV', 'VV+VH'] else ['HH','HH+HV'] == params.polarization) - assert(len(dict(params)) == 7) + assert ( + ['VV', 'VV+VH'] == params.polarization + if reference.properties['polarization'] in ['VV', 'VV+VH'] + else ['HH', 'HH+HV'] == params.polarization + ) + assert len(dict(params)) == 7 + def run_get_stack_opts_invalid_insarStackId(product): invalid_reference = as_ASFProduct(product, ASFSession()) - + invalid_reference.properties['insarStackId'] = '0' with pytest.raises(ASFBaselineError): invalid_reference.get_stack_opts() + def run_test_calc_temporal_baselines(reference, stack): reference = as_ASFProduct(reference, ASFSession()) stack = ASFSearchResults([as_ASFProduct(product, ASFSession()) for product in stack]) @@ -49,38 +56,52 @@ def run_test_calc_temporal_baselines(reference, stack): calculate_temporal_baselines(reference, stack) - assert(len(stack) == stackLength) + assert len(stack) == stackLength for secondary in stack: - assert('temporalBaseline' in secondary.properties) + assert 'temporalBaseline' in secondary.properties + def run_test_stack_from_product(reference, stack): reference = as_ASFProduct(reference, ASFSession()) with patch('asf_search.baseline_search.search') as search_mock: - search_mock.return_value = ASFSearchResults([as_ASFProduct(product, ASFSession()) for product in stack]) + search_mock.return_value = ASFSearchResults( + [as_ASFProduct(product, ASFSession()) for product in stack] + ) stack = stack_from_product(reference) - for (idx, secondary) in enumerate(stack): - if(idx > 0): - assert(secondary.properties['temporalBaseline'] >= stack[idx - 1].properties['temporalBaseline']) + for idx, secondary in enumerate(stack): + if idx > 0: + assert ( + secondary.properties['temporalBaseline'] + >= stack[idx - 1].properties['temporalBaseline'] + ) + def run_test_stack_from_id(stack_id: str, reference, stack): - temp = deepcopy(stack) - - with patch('asf_search.baseline_search.product_search') as mock_product_search: - mock_product_search.return_value = ASFSearchResults([as_ASFProduct(product, ASFSession()) for product in stack]) - - if not stack_id: - with pytest.raises(ASFSearchError): - 
stack_from_id(stack_id) - else: - with patch('asf_search.baseline_search.search') as search_mock: - search_mock.return_value = ASFSearchResults([as_ASFProduct(product, ASFSession()) for product in temp]) - - returned_stack = stack_from_id(stack_id) - assert(len(returned_stack) == len(stack)) - - for (idx, secondary) in enumerate(returned_stack): - if(idx > 0): - assert(secondary.properties['temporalBaseline'] >= stack[idx - 1]["properties"]['temporalBaseline']) + temp = deepcopy(stack) + + with patch('asf_search.baseline_search.product_search') as mock_product_search: + mock_product_search.return_value = ASFSearchResults( + [as_ASFProduct(product, ASFSession()) for product in stack] + ) + + if not stack_id: + with pytest.raises(ASFSearchError): + stack_from_id(stack_id) + else: + with patch('asf_search.baseline_search.search') as search_mock: + search_mock.return_value = ASFSearchResults( + [as_ASFProduct(product, ASFSession()) for product in temp] + ) + + returned_stack = stack_from_id(stack_id) + assert len(returned_stack) == len(stack) + + for idx, secondary in enumerate(returned_stack): + if idx > 0: + assert ( + secondary.properties['temporalBaseline'] + >= stack[idx - 1]['properties']['temporalBaseline'] + ) diff --git a/tests/CMR/test_MissionList.py b/tests/CMR/test_MissionList.py index 75c08442..7abb32c6 100644 --- a/tests/CMR/test_MissionList.py +++ b/tests/CMR/test_MissionList.py @@ -6,20 +6,27 @@ from asf_search.constants.INTERNAL import CMR_COLLECTIONS_PATH, CMR_HOST from asf_search.exceptions import CMRError - + def test_getMissions_error(): with requests_mock.Mocker() as m: - m.register_uri('POST', f"https://" + CMR_HOST + CMR_COLLECTIONS_PATH, status_code=300, json={'error': {'report': ""}}) - - with pytest.raises(CMRError): + m.register_uri( + 'POST', + f'https://' + CMR_HOST + CMR_COLLECTIONS_PATH, + status_code=300, + json={'error': {'report': ''}}, + ) + + with pytest.raises(CMRError): get_campaigns({}) + def test_getMissions_error_parsing(): with requests_mock.Mocker() as m: - m.post(f"https://" + CMR_HOST + CMR_COLLECTIONS_PATH) - + m.post(f'https://' + CMR_HOST + CMR_COLLECTIONS_PATH) + with pytest.raises(CMRError): get_campaigns({}) + def run_test_get_project_names(cmr_ummjson, campaigns): assert _get_project_names(cmr_ummjson) == campaigns diff --git a/tests/Search/test_search.py b/tests/Search/test_search.py index 0e32aae8..e6fca4f0 100644 --- a/tests/Search/test_search.py +++ b/tests/Search/test_search.py @@ -22,19 +22,22 @@ SEARCHAPI_URL = 'https://api.daac.asf.alaska.edu' SEARCHAPI_ENDPOINT = '/services/search/param?' 
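
A minimal, self-contained sketch of the requests_mock pattern the search test helpers below rely on; the 'answer' fixture is a hypothetical stand-in for a list of CMR UMM-JSON granules:

    import requests_mock
    from asf_search import search
    from asf_search.constants import INTERNAL

    def mocked_search_sketch(answer):
        with requests_mock.Mocker() as m:
            # Stub the CMR granule endpoint so search() never touches the network
            m.post(
                f'https://{INTERNAL.CMR_HOST}{INTERNAL.CMR_GRANULE_PATH}',
                json={'items': answer, 'hits': len(answer)},
            )
            return search(platform='UAVSAR', maxResults=250)
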
+ def run_test_ASFSearchResults(search_resp): - search_results = ASFSearchResults([as_ASFProduct(product, ASFSession()) for product in search_resp]) + search_results = ASFSearchResults( + [as_ASFProduct(product, ASFSession()) for product in search_resp] + ) - assert(len(search_results) == len(search_resp)) - assert(search_results.geojson()['type'] == 'FeatureCollection') + assert len(search_results) == len(search_resp) + assert search_results.geojson()['type'] == 'FeatureCollection' - for (idx, feature) in enumerate(search_results): - # temporal and perpendicular baseline values are calculated post-search, + for idx, feature in enumerate(search_results): + # temporal and perpendicular baseline values are calculated post-search, # so there's no instance where they'll be returned in a CMR search search_resp[idx]['properties'].pop('temporalBaseline', None) search_resp[idx]['properties'].pop('perpendicularBaseline', None) - assert(feature.geojson()['geometry'] == search_resp[idx]['geometry']) + assert feature.geojson()['geometry'] == search_resp[idx]['geometry'] for key, item in feature.geojson()['properties'].items(): if key == 'esaFrame': assert search_resp[idx]['properties']['frameNumber'] == item @@ -45,30 +48,39 @@ def run_test_ASFSearchResults(search_resp): elif search_resp[idx]['properties'].get(key) is not None and item is not None: assert item == search_resp[idx]['properties'][key] + def run_test_search(search_parameters, answer): with requests_mock.Mocker() as m: - m.post(f"https://{INTERNAL.CMR_HOST}{INTERNAL.CMR_GRANULE_PATH}", json={'items': answer, 'hits': len(answer)}) + m.post( + f'https://{INTERNAL.CMR_HOST}{INTERNAL.CMR_GRANULE_PATH}', + json={'items': answer, 'hits': len(answer)}, + ) response = search(**search_parameters) - if search_parameters.get("maxResults", False): - assert(len(response) == search_parameters["maxResults"]) + if search_parameters.get('maxResults', False): + assert len(response) == search_parameters['maxResults'] - assert(len(response) == len(answer)) + assert len(response) == len(answer) # assert(response.geojson()["features"] == answer) + def run_test_search_http_error(search_parameters, status_code: Number, report: str): - if not len(search_parameters.keys()): with requests_mock.Mocker() as m: - m.register_uri('POST', f"https://{INTERNAL.CMR_HOST}{INTERNAL.CMR_GRANULE_PATH}", status_code=status_code, json={'errors': {'report': report}}) - m.register_uri('POST', f"https://search-error-report.asf.alaska.edu/", real_http=True) + m.register_uri( + 'POST', + f'https://{INTERNAL.CMR_HOST}{INTERNAL.CMR_GRANULE_PATH}', + status_code=status_code, + json={'errors': {'report': report}}, + ) + m.register_uri('POST', f'https://search-error-report.asf.alaska.edu/', real_http=True) searchOptions = ASFSearchOptions(**search_parameters) with raises(ASFSearchError): results = search(opts=searchOptions) return # If we're not doing an empty search we want to fire off one real query to CMR, then interrupt it with an error - # We can tell a search isn't the first one by checking if 'CMR-Search-After' has been set + # We can tell a search isn't the first one by checking if 'CMR-Search-After' has been set def custom_matcher(request: requests.Request): if 'CMR-Search-After' in request.headers.keys(): resp = requests.Response() @@ -77,33 +89,58 @@ def custom_matcher(request: requests.Request): return None with requests_mock.Mocker() as m: - m.register_uri('POST', f"https://{INTERNAL.CMR_HOST}{INTERNAL.CMR_GRANULE_PATH}", real_http=True) - m.register_uri('POST', 
f"https://{INTERNAL.CMR_HOST}{INTERNAL.CMR_GRANULE_PATH}", additional_matcher=custom_matcher, status_code=status_code, json={'errors': {'report': report}}) - m.register_uri('POST', f"https://search-error-report.asf.alaska.edu/", real_http=True) + m.register_uri( + 'POST', + f'https://{INTERNAL.CMR_HOST}{INTERNAL.CMR_GRANULE_PATH}', + real_http=True, + ) + m.register_uri( + 'POST', + f'https://{INTERNAL.CMR_HOST}{INTERNAL.CMR_GRANULE_PATH}', + additional_matcher=custom_matcher, + status_code=status_code, + json={'errors': {'report': report}}, + ) + m.register_uri('POST', f'https://search-error-report.asf.alaska.edu/', real_http=True) search_parameters['maxResults'] = INTERNAL.CMR_PAGE_SIZE + 1 searchOptions = ASFSearchOptions(**search_parameters) - + with raises(ASFSearchError): results = search(opts=searchOptions) + def run_test_dataset_search(datasets: List): if any(dataset for dataset in datasets if dataset_collections.get(dataset) is None): with raises(ValueError): search(dataset=datasets, maxResults=1) - else: + else: for dataset in datasets: valid_shortnames = list(dataset_collections.get(dataset)) response = search(dataset=dataset, maxResults=250) # Get collection shortName of all granules - shortNames = list(set([shortName for product in response if (shortName:=ASFProduct.umm_get(product.umm, 'CollectionReference', 'ShortName')) is not None])) + shortNames = list( + set( + [ + shortName + for product in response + if ( + shortName := ASFProduct.umm_get( + product.umm, 'CollectionReference', 'ShortName' + ) + ) + is not None + ] + ) + ) # and check that results are limited to the expected datasets by their shortname for shortName in shortNames: assert shortName in valid_shortnames + def run_test_build_subqueries(params: ASFSearchOptions, expected: List): # mainly for getting platform aliases preprocess_opts(params) @@ -116,18 +153,21 @@ def run_test_build_subqueries(params: ASFSearchOptions, expected: List): for idx, key_value_pair in enumerate(actual_val): assert key_value_pair == expected_val[idx] else: - if len(actual_val) > 0: # ASFSearchOptions leaves empty lists as None + if len(actual_val) > 0: # ASFSearchOptions leaves empty lists as None expected_set = set(expected_val) actual_set = set(actual_val) difference = expected_set.symmetric_difference(actual_set) - assert len(difference) == 0, f"Found {len(difference)} missing entries for subquery generated keyword: \"{key}\"\n{list(difference)}" + assert ( + len(difference) == 0 + ), f'Found {len(difference)} missing entries for subquery generated keyword: "{key}"\n{list(difference)}' else: assert actual_val == expected_val + def run_test_keyword_aliasing_results(params: ASFSearchOptions): module_response = search(opts=params) - + try: api_response = query_endpoint(dict(params)) except requests.ReadTimeout as exc: @@ -137,15 +177,23 @@ def run_test_keyword_aliasing_results(params: ASFSearchOptions): api_results = api_response['results'] api_dict = {product['granuleName']: True for product in api_results} - + for product in module_response: sceneName = product.properties['sceneName'] - assert api_dict.get(sceneName, False), f'Found unexpected scene in asf-search module results, {sceneName}\{dict(params)}' - - -@retry(stop=stop_after_attempt(3), retry=retry_if_exception_type(requests.HTTPError), reraise=True) + assert api_dict.get( + sceneName, False + ), f'Found unexpected scene in asf-search module results, {sceneName}\{dict(params)}' + + +@retry( + stop=stop_after_attempt(3), + retry=retry_if_exception_type(requests.HTTPError), + 
reraise=True, +) def query_endpoint(params): - response = requests.post(url=SEARCHAPI_URL+SEARCHAPI_ENDPOINT, data={**params, 'output':'jsonlite'}) + response = requests.post( + url=SEARCHAPI_URL + SEARCHAPI_ENDPOINT, data={**params, 'output': 'jsonlite'} + ) response.raise_for_status() - return response.json() \ No newline at end of file + return response.json() diff --git a/tests/Search/test_search_generator.py b/tests/Search/test_search_generator.py index f1b248bc..196ac7cc 100644 --- a/tests/Search/test_search_generator.py +++ b/tests/Search/test_search_generator.py @@ -5,16 +5,19 @@ import math + def run_test_search_generator_multi(search_opts: List[ASFSearchOptions]): queries = [search_generator(opts=opts) for opts in search_opts] - + expected_results_size = sum([opts.maxResults for opts in search_opts]) - expected_page_count = sum([math.ceil(opts.maxResults / INTERNAL.CMR_PAGE_SIZE) for opts in search_opts]) + expected_page_count = sum( + [math.ceil(opts.maxResults / INTERNAL.CMR_PAGE_SIZE) for opts in search_opts] + ) combined_results = [] - + page_count = 0 searches = {} - + for opt in search_opts: if isinstance(opt.platform, list): for platform in opt.platform: @@ -22,7 +25,7 @@ def run_test_search_generator_multi(search_opts: List[ASFSearchOptions]): else: searches[opt.platform] = False - while(len(queries)): + while len(queries): queries_iter = iter(queries) for idx, query in enumerate(queries_iter): # Alternate pages between results page = next(query, None) @@ -44,26 +47,27 @@ def run_test_search_generator_multi(search_opts: List[ASFSearchOptions]): assert len(combined_results) == expected_results_size assert len([completed for completed in searches if completed]) >= len(search_opts) + def run_test_search_generator(search_opts: ASFSearchOptions): pages_iter = search_generator(opts=search_opts) - + page_count = int(search_opts.maxResults / INTERNAL.CMR_PAGE_SIZE) page_idx = 0 - + results = ASFSearchResults([]) for page in pages_iter: results.extend(page) results.searchComplete = page.searchComplete results.searchOptions = page.searchOptions page_idx += 1 - + assert page_count <= page_idx assert len(results) <= search_opts.maxResults assert results.searchComplete == True - + preprocess_opts(search_opts) for key, val in search_opts: if key != 'maxResults': - assert getattr(results.searchOptions, key) == val \ No newline at end of file + assert getattr(results.searchOptions, key) == val diff --git a/tests/Serialization/test_serialization.py b/tests/Serialization/test_serialization.py index 02eb92e1..011aa422 100644 --- a/tests/Serialization/test_serialization.py +++ b/tests/Serialization/test_serialization.py @@ -7,7 +7,6 @@ from asf_search.search.search_generator import as_ASFProduct - def run_test_serialization(product=None, results=None, opts=ASFSearchOptions()): if product is None: to_serialize = ASFSearchResults([json_to_product(prod) for prod in results]) @@ -15,7 +14,7 @@ def run_test_serialization(product=None, results=None, opts=ASFSearchOptions()): to_serialize = ASFSearchResults([json_to_product(product)]) with open('serialized_product.json', 'w') as f: - f.write(json.dumps({"results": to_serialize.geojson(), "opts": dict(opts)})) + f.write(json.dumps({'results': to_serialize.geojson(), 'opts': dict(opts)})) f.close() with open('serialized_product.json', 'r') as f: @@ -23,7 +22,7 @@ def run_test_serialization(product=None, results=None, opts=ASFSearchOptions()): f.close() os.remove('serialized_product.json') - + deserialized_results = deserialized.get('results') 
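
For reference, a minimal round-trip sketch of what run_test_serialization exercises: ASFSearchResults serializes to a GeoJSON FeatureCollection that survives a json round trip (an empty result set keeps the sketch self-contained):

    import json
    from asf_search import ASFSearchResults

    results = ASFSearchResults([])
    payload = json.loads(json.dumps({'results': results.geojson()}))
    assert payload['results']['type'] == 'FeatureCollection'
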
deserialized_opts = deserialized.get('opts') @@ -33,7 +32,7 @@ def run_test_serialization(product=None, results=None, opts=ASFSearchOptions()): for idx, original in enumerate(to_serialize): assert deserialized_results['features'][idx]['properties'] == original.properties assert deserialized_results['features'][idx]['geometry'] == original.geometry - + assert deserialized_results['type'] == 'FeatureCollection' diff --git a/tests/WKT/test_validate_wkt.py b/tests/WKT/test_validate_wkt.py index ce82f08e..32a0e14d 100644 --- a/tests/WKT/test_validate_wkt.py +++ b/tests/WKT/test_validate_wkt.py @@ -8,12 +8,12 @@ from asf_search.WKT.validate_wkt import ( validate_wkt, _search_wkt_prep, - _get_clamped_and_wrapped_geometry, - _get_convex_hull, - _merge_overlapping_geometry, + _get_clamped_and_wrapped_geometry, + _get_convex_hull, + _merge_overlapping_geometry, _counter_clockwise_reorientation, _simplify_aoi, - _get_shape_coords + _get_shape_coords, ) from asf_search.exceptions import ASFWKTError @@ -26,39 +26,48 @@ def run_test_validate_wkt_invalid_wkt_error(wkt: str): def run_test_validate_wkt_valid_wkt(wkt: str, validated_wkt: str): expected_aoi = loads(validated_wkt) actual_wrapped, actual_unwrapped, _ = validate_wkt(wkt) - - assert actual_wrapped.equals(expected_aoi), f"expected, {expected_aoi.wkt}, got {actual_wrapped.wkt}" - + + assert actual_wrapped.equals( + expected_aoi + ), f'expected, {expected_aoi.wkt}, got {actual_wrapped.wkt}' + actual_from_geom_wrapped, actual_from_geom_unwrapped, _ = validate_wkt(loads(wkt)) assert actual_from_geom_wrapped.equals(expected_aoi) -def run_test_validate_wkt_clamp_geometry(wkt: str, clamped_wkt: str, clamped_count: Number, wrapped_count: Number): + +def run_test_validate_wkt_clamp_geometry( + wkt: str, clamped_wkt: str, clamped_count: Number, wrapped_count: Number +): resp = _get_clamped_and_wrapped_geometry(loads(wkt)) assert resp[0].wkt == clamped_wkt - + if clamped_count > 0: assert resp[2][0].report.split(' ')[2] == str(clamped_count) - + if wrapped_count > 0: assert resp[2][1].report.split(' ')[2] == str(wrapped_count) + def run_test_validate_wkt_convex_hull(wkt: str, corrected_wkt: str): shape = loads(wkt) - assert corrected_wkt == _get_convex_hull(shape)[0].wkt + assert corrected_wkt == _get_convex_hull(shape)[0].wkt + def run_test_validate_wkt_merge_overlapping_geometry(wkt: str, merged_wkt: str): shape = loads(wkt) - + overlapping = _merge_overlapping_geometry(shape) if isinstance(overlapping, BaseMultipartGeometry): overlapping = overlapping.geoms assert overlapping[0].equals(loads(merged_wkt)) + def run_test_validate_wkt_counter_clockwise_reorientation(wkt: str, cc_wkt: str): shape = loads(wkt) - + assert cc_wkt == _counter_clockwise_reorientation(shape)[0].wkt + def run_test_validate_wkt_get_shape_coords(wkt: str, coords: List[Number]): shape = loads(wkt) shape_coords = [[coord[0], coord[1]] for coord in _get_shape_coords(shape)] @@ -69,18 +78,20 @@ def run_test_validate_wkt_get_shape_coords(wkt: str, coords: List[Number]): assert len(shape_coords) == len(coords) assert shape_coords == coords + def run_test_search_wkt_prep(wkt: str): if wkt == ' ': with pytest.raises(ASFWKTError): _search_wkt_prep(None) - + return shape = loads(wkt) ls = _search_wkt_prep(shape) assert ls.geometryType() == shape.geometryType() assert shape.wkt == wkt - + + def run_test_simplify_aoi(wkt: str, simplified: str, repairs: List[str]): shape = loads(wkt) resp, shape_repairs = _simplify_aoi(shape) diff --git a/tests/download/test_download.py 
b/tests/download/test_download.py index a89cd872..845515b8 100644 --- a/tests/download/test_download.py +++ b/tests/download/test_download.py @@ -7,20 +7,21 @@ from asf_search.download.download import download_url + def run_test_download_url_auth_error(url, path, filename): with patch('asf_search.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 401 - mock_get.return_value = resp - - if url == "pathError": + mock_get.return_value = resp + + if url == 'pathError': with pytest.raises(ASFDownloadError): download_url(url, path, filename) with patch('os.path.isdir') as path_mock: path_mock.return_value = True - if url == "urlError": + if url == 'urlError': with patch('os.path.isfile') as isfile_mock: isfile_mock.return_value = False @@ -29,17 +30,18 @@ def run_test_download_url_auth_error(url, path, filename): with patch('os.path.isfile') as isfile_mock: isfile_mock.return_value = True - + with pytest.warns(Warning): download_url(url, path, filename) + def run_test_download_url(url, path, filename): if filename == 'BURST': with patch('asf_search.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 202 resp.headers.update({'content-type': 'application/json'}) - mock_get.return_value = resp + mock_get.return_value = resp with patch('asf_search.ASFSession.get') as mock_get_burst: resp_2 = requests.Response() @@ -47,7 +49,7 @@ def run_test_download_url(url, path, filename): resp_2.headers.update({'content-type': 'image/tiff'}) mock_get_burst.return_value = resp_2 resp_2.iter_content = lambda chunk_size: [] - + with patch('builtins.open', unittest.mock.mock_open()) as m: download_url(url, path, filename) else: @@ -56,6 +58,6 @@ def run_test_download_url(url, path, filename): resp.status_code = 200 mock_get.return_value = resp resp.iter_content = lambda chunk_size: [] - + with patch('builtins.open', unittest.mock.mock_open()) as m: download_url(url, path, filename) diff --git a/tests/pytest-managers.py b/tests/pytest-managers.py index 96d2cb5e..89ca7960 100644 --- a/tests/pytest-managers.py +++ b/tests/pytest-managers.py @@ -1,14 +1,47 @@ from typing import Dict, List -from asf_search import ASFSearchOptions, ASFSession, FileDownloadType, ASFStackableProduct +from asf_search import ( + ASFSearchOptions, + ASFSession, + FileDownloadType, +) from asf_search.exceptions import ASFAuthenticationError -from ASFProduct.test_ASFProduct import run_test_ASFProduct, run_test_ASFProduct_download, run_test_product_get_stack_options, run_test_stack +from ASFProduct.test_ASFProduct import ( + run_test_ASFProduct, + run_test_ASFProduct_download, + run_test_product_get_stack_options, + run_test_stack, +) from ASFSearchOptions.test_ASFSearchOptions import run_test_ASFSearchOptions -from ASFSearchResults.test_ASFSearchResults import run_test_output_format, run_test_ASFSearchResults_intersection -from ASFSession.test_ASFSession import run_auth_with_cookiejar, run_auth_with_creds, run_auth_with_token, run_test_asf_session_rebuild_auth -from BaselineSearch.test_baseline_search import * -from Search.test_search import run_test_ASFSearchResults, run_test_build_subqueries, run_test_dataset_search, run_test_keyword_aliasing_results, run_test_search, run_test_search_http_error -from Search.test_search_generator import run_test_search_generator, run_test_search_generator_multi +from ASFSearchResults.test_ASFSearchResults import ( + run_test_ASFSearchResults_intersection, +) +from ASFSession.test_ASFSession import ( + run_auth_with_cookiejar, + run_auth_with_creds, + 
run_auth_with_token, + run_test_asf_session_rebuild_auth, +) +from BaselineSearch.test_baseline_search import ( + run_test_stack_from_id, + run_test_stack_from_product, + run_test_calc_temporal_baselines, + run_get_stack_opts_invalid_insarStackId, + run_test_get_unprocessed_stack_params, + run_test_get_preprocessed_stack_params, +) +from Search.test_search import ( + run_test_ASFSearchResults, + run_test_build_subqueries, + run_test_dataset_search, + run_test_keyword_aliasing_results, + run_test_search, + run_test_search_http_error, +) +from Search.test_search_generator import ( + run_test_search_generator, + run_test_search_generator_multi, +) from CMR.test_MissionList import run_test_get_project_names @@ -19,12 +52,33 @@ import pathlib import yaml -from WKT.test_validate_wkt import run_test_search_wkt_prep, run_test_validate_wkt_get_shape_coords, run_test_validate_wkt_clamp_geometry, run_test_validate_wkt_valid_wkt, run_test_validate_wkt_convex_hull, run_test_validate_wkt_counter_clockwise_reorientation, run_test_validate_wkt_invalid_wkt_error, run_test_validate_wkt_merge_overlapping_geometry, run_test_simplify_aoi -from ASFSearchOptions.test_ASFSearchOptions import run_test_ASFSearchOptions_validator, run_test_validator_map_validate -from BaselineSearch.Stack.test_stack import run_test_find_new_reference, run_test_get_baseline_from_stack, run_test_get_default_product_type, run_test_valid_state_vectors +from WKT.test_validate_wkt import ( + run_test_search_wkt_prep, + run_test_validate_wkt_get_shape_coords, + run_test_validate_wkt_clamp_geometry, + run_test_validate_wkt_valid_wkt, + run_test_validate_wkt_convex_hull, + run_test_validate_wkt_counter_clockwise_reorientation, + run_test_validate_wkt_invalid_wkt_error, + run_test_validate_wkt_merge_overlapping_geometry, + run_test_simplify_aoi, +) +from ASFSearchOptions.test_ASFSearchOptions import ( + run_test_ASFSearchOptions_validator, + run_test_validator_map_validate, +) +from BaselineSearch.Stack.test_stack import ( + run_test_find_new_reference, + run_test_get_baseline_from_stack, + run_test_get_default_product_type, + run_test_valid_state_vectors, +) from asf_search.search.search_generator import as_ASFProduct -from download.test_download import run_test_download_url, run_test_download_url_auth_error +from download.test_download import ( + run_test_download_url, + run_test_download_url_auth_error, +) from Serialization.test_serialization import run_test_serialization import nbformat from nbconvert.preprocessors import ExecutePreprocessor @@ -35,31 +89,34 @@ def test_ASFProduct(**args) -> None: """ Tests Basic ASFProduct with mock searchAPI response """ - test_info = args["test_info"] - geographic_response = get_resource(test_info["products"]) + test_info = args['test_info'] + geographic_response = get_resource(test_info['products']) run_test_ASFProduct(geographic_response) + def test_ASFProduct_Stack(**args) -> None: """ Tests ASFProduct.stack() with reference and corresponding stack Checks for temporalBaseline order, asserting the stack is ordered by the scene's temporalBaseline (in ascending order) """ - test_info = args["test_info"] - reference = get_resource(test_info["product"]) - preprocessed_stack = get_resource(test_info["preprocessed_stack"]) - processed_stack = get_resource(test_info["processed_stack"]) + test_info = args['test_info'] + reference = get_resource(test_info['product']) + preprocessed_stack = get_resource(test_info['preprocessed_stack']) + processed_stack = get_resource(test_info['processed_stack']) 
run_test_stack(reference, preprocessed_stack, processed_stack) + def test_ASFProduct_get_stack_options(**args) -> None: - test_info = args["test_info"] + test_info = args['test_info'] reference = get_resource(test_info['product']) options = get_resource(test_info['options']) run_test_product_get_stack_options(reference, options) + def test_ASFProduct_download(**args) -> None: - test_info = args["test_info"] + test_info = args['test_info'] reference = get_resource(test_info['product']) filename = test_info['filename'] filetype_raw = test_info['filetype'] @@ -74,54 +131,59 @@ def test_ASFProduct_download(**args) -> None: run_test_ASFProduct_download(reference, filename, filetype, additional_urls) + # asf_search.ASFSession Tests def test_ASFSession_Error(**args) -> None: """ Test ASFSession.auth_with_creds for sign in errors """ - test_info = args["test_info"] - username = test_info["username"] - password = test_info["password"] + test_info = args['test_info'] + username = test_info['username'] + password = test_info['password'] with patch('asf_search.ASFSession.get') as mock_get: - mock_get.return_value = "Error" + mock_get.return_value = 'Error' with raises(ASFAuthenticationError): run_auth_with_creds(username, password) + def test_ASFSession_Token_Error(**args) -> None: """ Test ASFSession.auth_with_token for sign in errors """ - test_info = args["test_info"] - token = test_info["token"] + test_info = args['test_info'] + token = test_info['token'] with raises(ASFAuthenticationError): run_auth_with_token(token) + def test_ASFSession_Cookie_Error(**args) -> None: """ Test ASFSession.auth_with_cookie for sign in errors """ - test_info = args["test_info"] - cookies = test_info["cookies"] + test_info = args['test_info'] + cookies = test_info['cookies'] with raises(ASFAuthenticationError): run_auth_with_cookiejar(cookies) + def test_asf_session_rebuild_auth(**args) -> None: """ Test asf_search.ASFSession.rebuild_auth When redirecting from an ASF domain, only accept domains listed in ASFSession.AUTH_DOMAINS """ - test_info = args["test_info"] - original_domain = test_info["original_domain"] - response_domain = test_info["response_domain"] - response_code = test_info["response_code"] - final_token = test_info["final_token"] + test_info = args['test_info'] + original_domain = test_info['original_domain'] + response_domain = test_info['response_domain'] + response_code = test_info['response_code'] + final_token = test_info['final_token'] run_test_asf_session_rebuild_auth(original_domain, response_domain, response_code, final_token) + # asf_search.search.baseline_search Tests def test_get_preprocessed_stack_params(**args) -> None: """ @@ -130,94 +192,102 @@ def test_get_preprocessed_stack_params(**args) -> None: \n1. processingLevel \n2. 
insarStackId """ - test_info = args["test_info"] - reference = get_resource(test_info["product"]) + test_info = args['test_info'] + reference = get_resource(test_info['product']) run_test_get_preprocessed_stack_params(reference) + def test_get_unprocessed_stack_params(**args) -> None: """ Test asf_search.search.baseline_search.get_stack_opts with a reference scene that's not part of a pre-calculated platform, asserting that get_stack_opts returns an object with seven parameters """ - test_info = args["test_info"] - reference = get_resource(test_info["product"]) + test_info = args['test_info'] + reference = get_resource(test_info['product']) run_test_get_unprocessed_stack_params(reference) + def test_get_stack_opts_invalid_insarStackId(**args) -> None: """ Test asf_search.search.baseline_search.get_stack_opts with a the reference scene's insarStackID set to an invalid value, and asserting an ASFBaselineError is raised """ - test_info = args["test_info"] - reference = get_resource(test_info["product"]) + test_info = args['test_info'] + reference = get_resource(test_info['product']) run_get_stack_opts_invalid_insarStackId(reference) + def test_temporal_baseline(**args) -> None: """ Test asf_search.search.baseline_search.calc_temporal_baselines, asserting mutated baseline stack is still the same length and that each product's properties contain a temporalBaseline key """ - test_info = args["test_info"] - reference = get_resource(test_info["product"]) - stack = get_resource(test_info["stack"]) + test_info = args['test_info'] + reference = get_resource(test_info['product']) + stack = get_resource(test_info['stack']) run_test_calc_temporal_baselines(reference, stack) + def test_stack_from_product(**args) -> None: """ Test asf_search.search.baseline_search.stack_from_product, asserting stack returned is ordered by temporalBaseline value in ascending order """ - test_info = args["test_info"] - reference = get_resource(test_info["product"]) - stack = get_resource(test_info["stack"]) + test_info = args['test_info'] + reference = get_resource(test_info['product']) + stack = get_resource(test_info['stack']) run_test_stack_from_product(reference, stack) + def test_stack_from_id(**args) -> None: """ Test asf_search.search.baseline_search.stack_from_id, asserting stack returned is ordered by temporalBaseline value in ascending order """ - test_info = args["test_info"] - stack_id = test_info["stack_id"] - stack_reference_data = test_info["stack_reference"] - stack_data = test_info["stack"] + test_info = args['test_info'] + stack_id = test_info['stack_id'] + stack_reference_data = test_info['stack_reference'] + stack_data = test_info['stack'] stack_reference = get_resource(stack_reference_data) stack = [] - if(stack_data != []): + if stack_data != []: stack = get_resource(stack_data) run_test_stack_from_id(stack_id, stack_reference, stack) + # asf_search.ASFSearchResults Tests def test_ASFSearchResults(**args) -> None: """ Test asf_search.ASFSearchResults, asserting initialized values, and geojson response returns object with type FeatureCollection """ - test_info = args["test_info"] - search_response = get_resource(test_info["response"]) + test_info = args['test_info'] + search_response = get_resource(test_info['response']) run_test_ASFSearchResults(search_response) + # asf_search.search Tests def test_ASFSearch_Search(**args) -> None: """ Test asf_search.search, asserting returned value is expected result """ - test_info = args["test_info"] - parameters = get_resource(test_info["parameters"]) - answer = 
get_resource(test_info["answer"]) + test_info = args['test_info'] + parameters = get_resource(test_info['parameters']) + answer = get_resource(test_info['answer']) run_test_search(parameters, answer) + def test_ASFSearch_Search_Generator(**args) -> None: - test_info = args["test_info"] + test_info = args['test_info'] params = get_resource(test_info['parameters']) if isinstance(params, list): @@ -236,133 +306,146 @@ def test_ASFSearch_Search_Error(**args) -> None: Test asf_search.search errors, asserting server and client errors are raised """ - test_info = args["test_info"] - parameters = test_info["parameters"] - report = test_info["report"] - error_code = test_info["status_code"] + test_info = args['test_info'] + parameters = test_info['parameters'] + report = test_info['report'] + error_code = test_info['status_code'] run_test_search_http_error(parameters, error_code, report) + def test_wkt_validation_Invalid_WKT_Error(**args) -> None: """ Test asf_search.wkt errors, asserting wkt validation errors are raised """ - test_info = args["test_info"] + test_info = args['test_info'] wkt = get_resource(test_info['wkt']) run_test_validate_wkt_invalid_wkt_error(wkt) + def test_wkt_validation_WKT_Valid(**args) -> None: """ Test asf_search.validate_wkt, asserting expected wkts are returned """ - test_info = args["test_info"] + test_info = args['test_info'] wkt = get_resource(test_info['wkt']) validated_wkt = get_resource(test_info['validated-wkt']) run_test_validate_wkt_valid_wkt(wkt, validated_wkt) + def test_wkt_validation_WKT_clamp_geometry(**args) -> None: """ Test asf_search.validate_wkt._get_clamped_and_wrapped_geometry, asserting the amount of clamped and wrapped coordinates """ - test_info = args["test_info"] + test_info = args['test_info'] wkt = get_resource(test_info['wkt']) clamped_wkt = get_resource(test_info['clamped-wkt']) clamped_count = get_resource(test_info['clamped-count']) wrapped_count = get_resource(test_info['wrapped-count']) run_test_validate_wkt_clamp_geometry(wkt, clamped_wkt, clamped_count, wrapped_count) + def test_wkt_validation_convex_hull(**args) -> None: """ Test asf_search.validate_wkt._get_convex_hull, asserting convex hulls producted are expected """ - test_info = args["test_info"] + test_info = args['test_info'] wkt = get_resource(test_info['wkt']) convex_wkt = get_resource(test_info['convex-wkt']) run_test_validate_wkt_convex_hull(wkt, convex_wkt) + def test_wkt_validation_merge_overlapping_geometry(**args) -> None: """ Test asf_search.validate_wkt._merge_overlapping_geometry, asserting expected shapes are merged """ - test_info = args["test_info"] + test_info = args['test_info'] wkt = get_resource(test_info['wkt']) merged_wkt = get_resource(test_info['merged-wkt']) run_test_validate_wkt_merge_overlapping_geometry(wkt, merged_wkt) + def test_wkt_validation_counter_clockwise_reorientation(**args) -> None: """ Test asf_search.validate_wkt._counter_clockwise_reorientation reverses polygon orientation if polygon is wound clockwise, and maintains counter-clockwise winding when polygon orientation is correct """ - test_info = args["test_info"] + test_info = args['test_info'] wkt = get_resource(test_info['wkt']) cc_wkt = get_resource(test_info['cc-wkt']) run_test_validate_wkt_counter_clockwise_reorientation(wkt, cc_wkt) + def test_validate_wkt_get_shape_coords(**args) -> None: """ Test asf_search.validate_wkt._get_shape_coords asserting all coordinates are returned and expected """ - test_info = args["test_info"] + test_info = args['test_info'] wkt = 
get_resource(test_info['wkt']) coords = get_resource(test_info['coordinates']) run_test_validate_wkt_get_shape_coords(wkt, coords) + def test_search_wkt_prep(**args) -> None: """ Test asf_search.validate_wkt.wkt_prep, asserting returned shape is correct geometric type and expected shape """ - test_info = args["test_info"] + test_info = args['test_info'] wkt = get_resource(test_info['wkt']) run_test_search_wkt_prep(wkt) + def test_simplify_aoi(**args) -> None: """ Test asf_search.validate_wkt.wkt_prep, asserting returned shape is correct geometric type and expected shape """ - test_info = args["test_info"] + test_info = args['test_info'] wkt = get_resource(test_info['wkt']) - simplified = get_resource(test_info["simplified-wkt"]) - RepairEntries = get_resource(test_info["RepairEntries"]) + simplified = get_resource(test_info['simplified-wkt']) + RepairEntries = get_resource(test_info['RepairEntries']) run_test_simplify_aoi(wkt, simplified, RepairEntries) + def test_get_platform_campaign_names(**args) -> None: - test_info = args["test_info"] - cmr_ummjson = get_resource(test_info["cmr_ummjson"]) - campaigns: List[str] = get_resource(test_info["campaigns"]) + test_info = args['test_info'] + cmr_ummjson = get_resource(test_info['cmr_ummjson']) + campaigns: List[str] = get_resource(test_info['campaigns']) run_test_get_project_names(cmr_ummjson, campaigns) + def test_download_url(**args) -> None: """ Test asf_search.download.download_url """ - test_info = args["test_info"] - url = test_info["url"] - path = test_info["path"] - filename = test_info["filename"] + test_info = args['test_info'] + url = test_info['url'] + path = test_info['path'] + filename = test_info['filename'] - if filename == "error": + if filename == 'error': run_test_download_url_auth_error(url, path, filename) else: run_test_download_url(url, path, filename) + def test_find_new_reference(**args) -> None: """ Test asf_search.baseline.calc.find_new_reference """ - test_info = args["test_info"] - stack = get_resource(test_info["stack"]) - output_index = get_resource(test_info["output_index"]) + test_info = args['test_info'] + stack = get_resource(test_info['stack']) + output_index = get_resource(test_info['output_index']) run_test_find_new_reference(stack, output_index) + def test_get_default_product_type(**args) -> None: - test_info = args["test_info"] - product = get_resource(test_info["product"]) - product_type = get_resource(test_info["product_type"]) + test_info = args['test_info'] + product = get_resource(test_info['product']) + product_type = get_resource(test_info['product_type']) product = as_ASFProduct({'meta': product['meta'], 'umm': product['umm']}, ASFSession()) @@ -371,31 +454,35 @@ def test_get_default_product_type(**args) -> None: run_test_get_default_product_type(product, product_type) + def test_get_baseline_from_stack(**args) -> None: - test_info = args["test_info"] + test_info = args['test_info'] reference = get_resource(test_info['reference']) stack = get_resource(test_info['stack']) output_stack = get_resource(test_info['output_stack']) error = get_resource(test_info['error']) run_test_get_baseline_from_stack(reference, stack, output_stack, error) + def test_valid_state_vectors(**args) -> None: - test_info = args["test_info"] + test_info = args['test_info'] reference = get_resource(test_info['reference']) output = get_resource(test_info['output']) run_test_valid_state_vectors(reference, output) + def test_validator_map_validate(**args) -> None: - test_info = args["test_info"] + test_info = args['test_info'] 
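
Every manager in this file follows the same shape; schematically (the 'fixture' key and run_example runner here are hypothetical stand-ins for the yml-defined keys and the real runner functions):

    def test_example(**args) -> None:
        test_info = args['test_info']
        fixture = get_resource(test_info['fixture'])  # resolves .yml resources to data
        run_example(fixture)
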
key = get_resource(test_info['key']) value = get_resource(test_info['value']) output = get_resource(test_info['output']) run_test_validator_map_validate(key, value, output) + def test_ASFSearchOptions_validator(**args) -> None: - test_info = args["test_info"] + test_info = args['test_info'] validator_name = get_resource(test_info['validator']) param = safe_load_tuple(get_resource(test_info['input'])) output = safe_load_tuple(get_resource(test_info['output'])) @@ -406,19 +493,25 @@ def test_ASFSearchOptions_validator(**args) -> None: def test_ASFSearchOptions(**kwargs) -> None: run_test_ASFSearchOptions(**kwargs) + def test_ASFSearchResults_intersection(**kwargs) -> None: wkt = get_resource(kwargs['test_info']['wkt']) run_test_ASFSearchResults_intersection(wkt) + def test_search_dataset(**kwargs) -> None: dataset = get_resource(kwargs['test_info']['dataset']) run_test_dataset_search(dataset) + def test_build_subqueries(**kwargs) -> None: params = ASFSearchOptions(**get_resource(kwargs['test_info']['params'])) - expected = [ASFSearchOptions(**subquery) for subquery in get_resource(kwargs['test_info']['expected'])] + expected = [ + ASFSearchOptions(**subquery) for subquery in get_resource(kwargs['test_info']['expected']) + ] run_test_build_subqueries(params, expected) + def test_serialization(**args) -> None: test_info = args['test_info'] product = get_resource(test_info.get('product')) @@ -428,6 +521,7 @@ def test_serialization(**args) -> None: run_test_serialization(product, results, options) + def test_notebook_examples(**args) -> None: test_info = args['test_info'] notebook_file = test_info['notebook'] @@ -437,13 +531,14 @@ def test_notebook_examples(**args) -> None: notebook = nbformat.read(f, as_version=4) ep = ExecutePreprocessor(timeout=600) try: - assert ep.preprocess(notebook) != None, f"Got empty notebook for {notebook_file}" + assert ep.preprocess(notebook) is not None, f'Got empty notebook for {notebook_file}' except Exception as e: - assert False, f"Failed executing {notebook_file}: {e}" + assert False, f'Failed executing {notebook_file}: {e}' # Testing resource loading utilities + def safe_load_tuple(param): """ loads a tuple from a list if a param is an object with key 'tuple' @@ -452,11 +547,12 @@ def safe_load_tuple(param): """ if isinstance(param, Dict): - if "tuple" in param.keys(): + if 'tuple' in param.keys(): param = tuple(param['tuple']) return param + # def test_output_format(**args) -> None: # test_info = args['test_info'] @@ -467,6 +563,7 @@ def safe_load_tuple(param): # run_test_output_format(results) + def test_keyword_aliasing_results(**args) -> None: test_info = args['test_info'] @@ -474,23 +571,22 @@ def test_keyword_aliasing_results(**args) -> None: opts.maxResults = 250 run_test_keyword_aliasing_results(opts) - + # Finds and loads file from yml_tests/Resouces/ if loaded field ends with .yml/yaml extension def get_resource(yml_file): - if isinstance(yml_file, str): - if yml_file.endswith((".yml", ".yaml")): + if yml_file.endswith(('.yml', '.yaml')): base_path = pathlib.Path(__file__).parent.resolve() - with open(os.path.join(base_path, "yml_tests", "Resources", yml_file), "r") as f: + with open(os.path.join(base_path, 'yml_tests', 'Resources', yml_file), 'r') as f: try: return yaml.safe_load(f) except yaml.YAMLError as exc: print(exc) - elif isinstance(yml_file, List): #check if it's a list of yml files + elif isinstance(yml_file, List): # check if it's a list of yml files if len(yml_file) > 0: if isinstance(yml_file[0], str): - if 
yml_file[0].endswith((".yml", ".yaml")): + if yml_file[0].endswith(('.yml', '.yaml')): return [get_resource(file) for file in yml_file] return yml_file From f2c51331dc4fede446de84ae54cf558e649c3a2a Mon Sep 17 00:00:00 2001 From: kim Date: Fri, 2 Aug 2024 15:15:42 -0800 Subject: [PATCH 37/40] tests dir added to linting workflow, change wording on deprecation warning for cmr_host --- .github/workflows/lint.yml | 2 -- asf_search/ASFSession.py | 8 ++++---- tests/ASFProduct/test_ASFProduct.py | 9 ++++----- tests/ASFSearchOptions/test_ASFSearchOptions.py | 4 ++-- tests/BaselineSearch/Stack/test_stack.py | 6 +++--- tests/CMR/test_MissionList.py | 4 ++-- tests/Search/test_search.py | 12 ++++++------ tests/Search/test_search_generator.py | 10 ++++++---- tests/Serialization/test_serialization.py | 2 +- tests/download/test_download.py | 4 ++-- 10 files changed, 30 insertions(+), 31 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index d7dd83ff..4956a748 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,5 +7,3 @@ jobs: steps: - uses: actions/checkout@v4 - uses: chartboost/ruff-action@v1 - with: - src: asf_search diff --git a/asf_search/ASFSession.py b/asf_search/ASFSession.py index ca6c13f5..0ce1ed75 100644 --- a/asf_search/ASFSession.py +++ b/asf_search/ASFSession.py @@ -6,7 +6,7 @@ from asf_search import ASF_LOGGER, __name__ as asf_name, __version__ as asf_version from asf_search.exceptions import ASFAuthenticationError -from warnings import warn + class ASFSession(requests.Session): @@ -88,10 +88,10 @@ def __init__( self.cmr_host = INTERNAL.CMR_HOST if cmr_host is not None: - warn( + ASF_LOGGER.warning( 'Use of `cmr_host` keyword with `ASFSession` is deprecated ' 'for asf-search versions >= 7.0.9, ' - 'and will be removed with the next major version.' + 'and may be removed in a future major release.' '\nTo authenticate an EDL token for a non-prod deployment of CMR, ' 'set the `edl_host` keyword instead. ' '\n(ex: session arugments for authenticating against uat: ' @@ -190,7 +190,7 @@ def _try_legacy_token_auth(self, token: str) -> False: Checks `cmr_host` search endpoint directly with provided token using method used in previous versions of asf-search (<7.0.9). 
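
As a sketch of what the reworded warning steers users toward, a session aimed at a non-production deployment can pass the hosts explicitly; the values below mirror the UAT constants used in tests/ASFSession/test_ASFSession.py:

    import asf_search

    session = asf_search.ASFSession(
        cmr_host='cmr.uat.earthdata.nasa.gov',
        edl_host='uat.urs.earthdata.nasa.gov',
    )
    # auth_with_token() would then validate the token against the UAT endpoints
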
- This is to prevent breaking changes until next major release + This may be removed in a future release """ from asf_search.constants import INTERNAL diff --git a/tests/ASFProduct/test_ASFProduct.py b/tests/ASFProduct/test_ASFProduct.py index 13922f37..65ce53df 100644 --- a/tests/ASFProduct/test_ASFProduct.py +++ b/tests/ASFProduct/test_ASFProduct.py @@ -1,4 +1,3 @@ -import logging import pytest import unittest @@ -58,8 +57,8 @@ def run_test_stack(reference, pre_processed_stack, processed_stack): stack = [ product for product in stack - if product.properties['temporalBaseline'] != None - and product.properties['perpendicularBaseline'] != None + if product.properties['temporalBaseline'] is not None + and product.properties['perpendicularBaseline'] is not None ] for idx, secondary in enumerate(stack): @@ -96,8 +95,8 @@ def run_test_ASFProduct_download(reference, filename, filetype, additional_urls) mock_get.return_value = resp resp.iter_content = lambda chunk_size: [] - with patch('builtins.open', unittest.mock.mock_open()) as m: - if filename != None and ( + with patch('builtins.open', unittest.mock.mock_open()): + if filename is not None and ( (filetype == FileDownloadType.ADDITIONAL_FILES and len(additional_urls) > 1) or (filetype == FileDownloadType.ALL_FILES and len(additional_urls) > 0) ): diff --git a/tests/ASFSearchOptions/test_ASFSearchOptions.py b/tests/ASFSearchOptions/test_ASFSearchOptions.py index 0ac863bb..c58f0a3a 100644 --- a/tests/ASFSearchOptions/test_ASFSearchOptions.py +++ b/tests/ASFSearchOptions/test_ASFSearchOptions.py @@ -25,7 +25,7 @@ def run_test_validator_map_validate(key, value, output): def run_test_ASFSearchOptions_validator(validator_name, param, output, error): validator = getattr(validators, validator_name) - if error == None: + if error is None: assert output == validator(param) else: with raises(ValueError) as e: @@ -54,7 +54,7 @@ def run_test_ASFSearchOptions(**kwargs): return else: assert ( - exception == None + exception is None ), f'ERROR: Expected exception {exception}, but SearchOptions never threw.' 
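
The validator tests above resolve validators by name with getattr; a minimal sketch of that dispatch, assuming parse_string is among the functions exported by the validators module:

    from asf_search.ASFSearchOptions import validators

    parse = getattr(validators, 'parse_string')
    assert parse('SENTINEL-1A') == 'SENTINEL-1A'
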
for key, val in expect_output.items():
diff --git a/tests/BaselineSearch/Stack/test_stack.py b/tests/BaselineSearch/Stack/test_stack.py
index 985c31e4..cb877b4e 100644
--- a/tests/BaselineSearch/Stack/test_stack.py
+++ b/tests/BaselineSearch/Stack/test_stack.py
@@ -13,7 +13,7 @@ def run_test_find_new_reference(stack: List, output_index: Number) -> None:
     """
     if stack == []:
-        assert find_new_reference(stack) == None
+        assert find_new_reference(stack) is None
     else:
         products = [as_ASFProduct(product, ASFSession()) for product in stack]
         for idx, product in enumerate(products):
@@ -32,7 +32,7 @@ def run_test_get_baseline_from_stack(reference, stack, output_stack, error):
     reference = as_ASFProduct(reference, ASFSession())
     stack = ASFSearchResults([as_ASFProduct(product, ASFSession()) for product in stack])
 
-    if error == None:
+    if error is None:
         stack, warnings = get_baseline_from_stack(reference, stack)
 
         keys = ['sceneName', 'perpendicularBaseline', 'temporalBaseline']
@@ -55,7 +55,7 @@ def run_test_get_baseline_from_stack(reference, stack, output_stack, error):
 
 
 def run_test_valid_state_vectors(reference, output):
-    if reference != None:
+    if reference is not None:
         product = as_ASFProduct(reference, ASFSession())
         clear_baseline(reference, product)
         assert output == product.is_valid_reference()
diff --git a/tests/CMR/test_MissionList.py b/tests/CMR/test_MissionList.py
index 7abb32c6..73be2bd6 100644
--- a/tests/CMR/test_MissionList.py
+++ b/tests/CMR/test_MissionList.py
@@ -11,7 +11,7 @@ def test_getMissions_error():
     with requests_mock.Mocker() as m:
         m.register_uri(
             'POST',
-            f'https://' + CMR_HOST + CMR_COLLECTIONS_PATH,
+            'https://' + CMR_HOST + CMR_COLLECTIONS_PATH,
             status_code=300,
             json={'error': {'report': ''}},
         )
@@ -22,7 +22,7 @@ def test_getMissions_error():
 
 def test_getMissions_error_parsing():
     with requests_mock.Mocker() as m:
-        m.post(f'https://' + CMR_HOST + CMR_COLLECTIONS_PATH)
+        m.post('https://' + CMR_HOST + CMR_COLLECTIONS_PATH)
 
         with pytest.raises(CMRError):
             get_campaigns({})
diff --git a/tests/Search/test_search.py b/tests/Search/test_search.py
index e6fca4f0..65ae23cd 100644
--- a/tests/Search/test_search.py
+++ b/tests/Search/test_search.py
@@ -4,7 +4,7 @@
 # from asf_search.CMR.translate import get
 from tenacity import retry, retry_if_exception_type, stop_after_attempt
 
-from asf_search import ASF_LOGGER, ASFSearchOptions
+from asf_search import ASF_LOGGER
 from asf_search.CMR.subquery import build_subqueries
 from asf_search.CMR.translate import try_parse_date
 from asf_search.constants import INTERNAL
@@ -73,10 +73,10 @@ def run_test_search_http_error(search_parameters, status_code: Number, report: s
             status_code=status_code,
             json={'errors': {'report': report}},
         )
-        m.register_uri('POST', f'https://search-error-report.asf.alaska.edu/', real_http=True)
+        m.register_uri('POST', 'https://search-error-report.asf.alaska.edu/', real_http=True)
         searchOptions = ASFSearchOptions(**search_parameters)
         with raises(ASFSearchError):
-            results = search(opts=searchOptions)
+            search(opts=searchOptions)
         return
 
     # If we're not doing an empty search we want to fire off one real query to CMR, then interrupt it with an error
@@ -101,13 +101,13 @@ def custom_matcher(request: requests.Request):
             status_code=status_code,
             json={'errors': {'report': report}},
         )
-        m.register_uri('POST', f'https://search-error-report.asf.alaska.edu/', real_http=True)
+        m.register_uri('POST', 'https://search-error-report.asf.alaska.edu/', real_http=True)
 
         search_parameters['maxResults'] = INTERNAL.CMR_PAGE_SIZE + 1
         searchOptions = ASFSearchOptions(**search_parameters)
         with raises(ASFSearchError):
-            results = search(opts=searchOptions)
+            search(opts=searchOptions)
 
 
 def run_test_dataset_search(datasets: List):
@@ -170,7 +170,7 @@ def run_test_keyword_aliasing_results(params: ASFSearchOptions):
     try:
         api_response = query_endpoint(dict(params))
-    except requests.ReadTimeout as exc:
+    except requests.ReadTimeout:
         ASF_LOGGER.warn(f'SearchAPI timed out, skipping test for params {str(params)}')
         return
diff --git a/tests/Search/test_search_generator.py b/tests/Search/test_search_generator.py
index 196ac7cc..11b56bf2 100644
--- a/tests/Search/test_search_generator.py
+++ b/tests/Search/test_search_generator.py
@@ -1,10 +1,12 @@
-from asf_search.search.search_generator import *
+
 from asf_search import ASFSearchOptions, ASFSearchResults
 from asf_search import INTERNAL
 
 from typing import List
 import math
 
+from asf_search.search import search_generator, preprocess_opts
+
 
 def run_test_search_generator_multi(search_opts: List[ASFSearchOptions]):
     queries = [search_generator(opts=opts) for opts in search_opts]
@@ -29,7 +31,7 @@ def run_test_search_generator_multi(search_opts: List[ASFSearchOptions]):
     queries_iter = iter(queries)
     for idx, query in enumerate(queries_iter): # Alternate pages between results
         page = next(query, None)
-        if page != None:
+        if page is not None:
             combined_results.extend(page)
             page_count += 1
             if page.searchComplete:
@@ -41,7 +43,7 @@ def run_test_search_generator_multi(search_opts: List[ASFSearchOptions]):
         else:
             queries[idx] = None
 
-    queries = [query for query in queries if query != None]
+    queries = [query for query in queries if query is not None]
 
     assert page_count == expected_page_count
     assert len(combined_results) == expected_results_size
@@ -64,7 +66,7 @@ def run_test_search_generator(search_opts: ASFSearchOptions):
 
     assert page_count <= page_idx
     assert len(results) <= search_opts.maxResults
-    assert results.searchComplete == True
+    assert results.searchComplete
 
     preprocess_opts(search_opts)
diff --git a/tests/Serialization/test_serialization.py b/tests/Serialization/test_serialization.py
index 011aa422..4339df0e 100644
--- a/tests/Serialization/test_serialization.py
+++ b/tests/Serialization/test_serialization.py
@@ -1,4 +1,4 @@
-from asf_search import ASFProduct, ASFSearchResults, ASFSession
+from asf_search import ASFSearchResults, ASFSession
 from asf_search.ASFSearchOptions.ASFSearchOptions import ASFSearchOptions
 
 import os
diff --git a/tests/download/test_download.py b/tests/download/test_download.py
index 845515b8..95e07975 100644
--- a/tests/download/test_download.py
+++ b/tests/download/test_download.py
@@ -50,7 +50,7 @@ def run_test_download_url(url, path, filename):
             mock_get_burst.return_value = resp_2
             resp_2.iter_content = lambda chunk_size: []
 
-            with patch('builtins.open', unittest.mock.mock_open()) as m:
+            with patch('builtins.open', unittest.mock.mock_open()):
                 download_url(url, path, filename)
     else:
         with patch('asf_search.ASFSession.get') as mock_get:
@@ -59,5 +59,5 @@ def run_test_download_url(url, path, filename):
             mock_get.return_value = resp
             resp.iter_content = lambda chunk_size: []
 
-            with patch('builtins.open', unittest.mock.mock_open()) as m:
+            with patch('builtins.open', unittest.mock.mock_open()):
                 download_url(url, path, filename)

From 8c4e8ddfc81cf616db8c3cfee0c82702caef396f Mon Sep 17 00:00:00 2001
From: kim
Date: Fri, 2 Aug 2024 15:22:08 -0800
Subject: [PATCH 38/40] update import, ASFSession uses warn again

---
 asf_search/ASFSession.py      | 7 ++++---
 asf_search/search/__init__.py | 2 +-
2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/asf_search/ASFSession.py b/asf_search/ASFSession.py
index 0ce1ed75..bab3afb2 100644
--- a/asf_search/ASFSession.py
+++ b/asf_search/ASFSession.py
@@ -1,3 +1,4 @@
+from logging import warn
 import platform
 from typing import List, Union
 import requests
@@ -6,7 +7,7 @@
 from asf_search import ASF_LOGGER, __name__ as asf_name, __version__ as asf_version
 from asf_search.exceptions import ASFAuthenticationError
 
-
+from warnings import warn
 
 class ASFSession(requests.Session):
@@ -88,7 +89,7 @@ def __init__(
         self.cmr_host = INTERNAL.CMR_HOST
 
         if cmr_host is not None:
-            ASF_LOGGER.warning(
+            warn(
                 'Use of `cmr_host` keyword with `ASFSession` is deprecated '
                 'for asf-search versions >= 7.0.9, '
                 'and may be removed in a future major release.'
@@ -140,7 +141,7 @@ def auth_with_creds(self, username: str, password: str):
         token = self.cookies.get_dict().get('urs-access-token')
 
         if token is None:
-            ASF_LOGGER.warning(
+            warn(
                 f'Provided asf_auth_host "{self.asf_auth_host}" returned no EDL token '
                 'during ASFSession validation. EDL Token expected in "urs-access-token" cookie, '
                 'required for hidden/restricted dataset access. '
diff --git a/asf_search/search/__init__.py b/asf_search/search/__init__.py
index 133b1f09..35cf590c 100644
--- a/asf_search/search/__init__.py
+++ b/asf_search/search/__init__.py
@@ -5,4 +5,4 @@
 from .baseline_search import stack_from_id  # noqa: F401
 from .campaigns import campaigns  # noqa: F401
 from .search_count import search_count  # noqa: F401
-from .search_generator import search_generator  # noqa: F401
+from .search_generator import search_generator, preprocess_opts  # noqa: F401

From 54affe8e18dca3ee1890f8022f11bdcd1bec4590 Mon Sep 17 00:00:00 2001
From: kim
Date: Fri, 2 Aug 2024 15:26:35 -0800
Subject: [PATCH 39/40] changes warn() import

---
 asf_search/ASFSession.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/asf_search/ASFSession.py b/asf_search/ASFSession.py
index bab3afb2..017dfd0b 100644
--- a/asf_search/ASFSession.py
+++ b/asf_search/ASFSession.py
@@ -7,7 +7,7 @@
 from asf_search import ASF_LOGGER, __name__ as asf_name, __version__ as asf_version
 from asf_search.exceptions import ASFAuthenticationError
 
-from warnings import warn
+import warnings
 
 class ASFSession(requests.Session):
@@ -89,7 +89,7 @@ def __init__(
         self.cmr_host = INTERNAL.CMR_HOST
 
         if cmr_host is not None:
-            warn(
+            warnings.warn(
                 'Use of `cmr_host` keyword with `ASFSession` is deprecated '
                 'for asf-search versions >= 7.0.9, '
                 'and may be removed in a future major release.'
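
Note on the two commits above: moving the deprecation notice from ASF_LOGGER to warnings.warn() changes who controls its visibility. Logger output only reaches whatever handlers the importing application has configured, while warnings flow through Python's warning filters and can be raised as errors, silenced, or shown once via -W or warnings.simplefilter(). A minimal sketch of the resulting pattern; the explicit DeprecationWarning category and stacklevel shown here are illustrative refinements, not part of the patch, which passes only a message:

    import warnings

    def check_deprecated_kwargs(cmr_host=None):
        # Mirrors the call site patched above: a qualified warnings.warn()
        # call following a module-level `import warnings`.
        if cmr_host is not None:
            warnings.warn(
                'Use of `cmr_host` keyword with `ASFSession` is deprecated '
                'for asf-search versions >= 7.0.9, '
                'and may be removed in a future major release.',
                DeprecationWarning,  # illustrative; the patch relies on the default UserWarning
                stacklevel=2,  # illustrative; attributes the warning to the caller
            )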
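
The companion change to asf_search/search/__init__.py re-exports preprocess_opts alongside search_generator, which is what lets the rewritten test module in PATCH 37 import both names explicitly instead of star-importing search_generator's internals. A usage sketch under those imports, assuming illustrative search parameters (the platform value and result handling are not taken from the tests):

    from asf_search import ASFSearchOptions
    from asf_search.search import search_generator, preprocess_opts

    opts = ASFSearchOptions(platform='Sentinel-1A', maxResults=250)  # illustrative parameters

    results = []
    for page in search_generator(opts=opts):
        # Each yielded page is an ASFSearchResults; its searchComplete flag
        # flips to True once CMR reports no further pages for the query.
        results.extend(page)
        if page.searchComplete:
            break

    # The tests above call preprocess_opts() to normalize an options object
    # in place (they use no return value) before comparing it against
    # expected output.
    preprocess_opts(opts)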
From 74325d975c5de511fbccc1ea977dfd93d2b04499 Mon Sep 17 00:00:00 2001
From: kim
Date: Thu, 8 Aug 2024 09:37:00 -0800
Subject: [PATCH 40/40] re-adds lat 64-65 test case, removes asf-search vs.
 SearchAPI export format tests

---
 tests/yml_tests/test_ASFSearchResults.yml | 19 ++-----------------
 1 file changed, 2 insertions(+), 17 deletions(-)

diff --git a/tests/yml_tests/test_ASFSearchResults.yml b/tests/yml_tests/test_ASFSearchResults.yml
index 046d2df5..6fed19aa 100644
--- a/tests/yml_tests/test_ASFSearchResults.yml
+++ b/tests/yml_tests/test_ASFSearchResults.yml
@@ -17,8 +17,8 @@ tests:
 - Test ASFSearchResults_intersection linestring-equator:
     wkt: LINESTRING(-59 0, 59 0)
 
-# - Test ASFSearchResults_intersection Latitude 64 to 65:
-#     wkt: POLYGON((-179 64, -159 64, -159 65, -179 65, -179 64))
+- Test ASFSearchResults_intersection Latitude 64 to 65:
+    wkt: POLYGON((-179 64, -159 64, -159 65, -179 65, -179 64))
 
 - Test ASFSearchResults_intersection Australia:
     wkt: POLYGON((108.3836 -45.0288,154.4383 -45.0288,154.4383 -9.7116,108.3836 -9.7116,108.3836 -45.0288))
@@ -28,18 +28,3 @@ tests:
 
 - Test ASFSearchResults_intersection antimeridian:
     wkt: POLYGON((-181 -89, -179 -89, -179 89, -181 89, -181 -89))
-
-# - Test ASFSearchResults-format Fairbanks slc:
-#     results: [Fairbanks_SLC.yml]
-
-# - Test ASFSearchResults-format Fairbanks S1 Stack:
-#     results: Fairbanks_S1_stack.yml
-
-# - Test ASFSearchResults-format Alos:
-#     results: Alos_response.yml
-
-# - Test ASFSearchResults-format L1:
-#     results: Fairbanks_L1.yml
-
-# - Test ASFSearchResults-format ERS Stack:
-#     results: Fairbanks_ers_stack.yml
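
Closing note on the re-added case: the ASFSearchResults_intersection tests drive a search with each wkt value and check the returned product footprints against that area of interest. The harness itself is not shown in this patch; a minimal sketch of the geometric predicate such a test relies on, using shapely (an assumed dependency here, and the sample footprint is hypothetical):

    from shapely import wkt

    # The AOI re-added above.
    aoi = wkt.loads('POLYGON((-179 64, -159 64, -159 65, -179 65, -179 64))')

    def footprint_intersects_aoi(product_wkt: str) -> bool:
        # Every product returned for an intersectsWith search over this AOI
        # should have a footprint that intersects it.
        return aoi.intersects(wkt.loads(product_wkt))

    # Hypothetical footprint inside the 64-65N band:
    assert footprint_intersects_aoi(
        'POLYGON((-170 64.2, -165 64.2, -165 64.8, -170 64.8, -170 64.2))'
    )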