From 01381efb5b39dd96b0ec86ba705e2a98d510ab65 Mon Sep 17 00:00:00 2001 From: Cameron Showalter Date: Tue, 11 Jul 2023 09:24:47 -0800 Subject: [PATCH 01/30] Moved logging up, so if version check fails, we can still log it --- asf_search/__init__.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/asf_search/__init__.py b/asf_search/__init__.py index d6fab9d7..66d755ac 100644 --- a/asf_search/__init__.py +++ b/asf_search/__init__.py @@ -1,6 +1,12 @@ # backport of importlib.metadata for python < 3.8 from importlib_metadata import PackageNotFoundError, version + +## Setup logging now, so it's available if __version__ fails: import logging +ASF_LOGGER = logging.getLogger(__name__) +# Add null handle so we do nothing by default. It's up to whatever +# imports us, if they want logging. +ASF_LOGGER.addHandler(logging.NullHandler()) try: __version__ = version(__name__) @@ -14,11 +20,6 @@ ASF_LOGGER.exception(msg) raise PackageNotFoundError("Install with 'python3 -m pip install -e .' to use") from e -ASF_LOGGER = logging.getLogger(__name__) -# Add null handle so we do nothing by default. It's up to whatever -# imports us, if they want logging. -ASF_LOGGER.addHandler(logging.NullHandler()) - from .ASFSession import ASFSession from .ASFProduct import ASFProduct from .ASFSearchResults import ASFSearchResults From 8816940c0a15cdbb5f1ae2f24bdacd011880d238 Mon Sep 17 00:00:00 2001 From: Cameron Showalter Date: Tue, 11 Jul 2023 11:54:13 -0800 Subject: [PATCH 02/30] Moved logging up, to log when setup doesn't work correctly --- asf_search/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asf_search/__init__.py b/asf_search/__init__.py index 66d755ac..9b61ad30 100644 --- a/asf_search/__init__.py +++ b/asf_search/__init__.py @@ -23,7 +23,7 @@ from .ASFSession import ASFSession from .ASFProduct import ASFProduct from .ASFSearchResults import ASFSearchResults -from .ASFSearchOptions import ASFSearchOptions, validators +from .ASFSearchOptions import ASFSearchOptions, validators, validator_map from .exceptions import * from .constants import * from .health import * From 8ea19aee3beb3e857dc2d9313bc5abea6532770b Mon Sep 17 00:00:00 2001 From: Cameron Showalter Date: Wed, 9 Aug 2023 11:12:23 -0800 Subject: [PATCH 03/30] update for using asf_search in SearchAPI --- asf_search/ASFSearchOptions/validators.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py index b4b6f88f..d83ee105 100644 --- a/asf_search/ASFSearchOptions/validators.py +++ b/asf_search/ASFSearchOptions/validators.py @@ -21,7 +21,7 @@ def parse_string(value: str) -> str: except ValueError as exc: # If this happens, printing v's value would fail too... raise ValueError(f"Invalid string: Can't cast type '{type(value)}' to string.") from exc if len(value) == 0: - raise ValueError(f'Invalid string: Empty.') + raise ValueError('Invalid string: Empty.') return value @@ -35,7 +35,7 @@ def parse_float(value: float) -> float: value = float(value) except ValueError as exc: raise ValueError(f'Invalid float: {value}') from exc - if math.isinf(value): + if math.isinf(value) or math.isnan(value): raise ValueError(f'Float values must be finite: got {value}') return value From c91bb79b468a970f13abea0c9b8c2374498690d2 Mon Sep 17 00:00:00 2001 From: Cameron Showalter Date: Thu, 10 Aug 2023 12:43:17 -0800 Subject: [PATCH 04/30] Adding in support for 'circle' keyword. 
Exposed in ASFSearchOptions, and uses CMR's key to search for results --- asf_search/ASFSearchOptions/validator_map.py | 3 ++- asf_search/ASFSearchOptions/validators.py | 8 ++++++++ asf_search/CMR/field_map.py | 1 + asf_search/CMR/subquery.py | 3 ++- asf_search/CMR/translate.py | 5 +++++ asf_search/search/search_generator.py | 8 ++++---- tests/yml_tests/test_ASFSearchOptions.yml | 6 ++++++ tests/yml_tests/test_search.yml | 7 +++++++ 8 files changed, 35 insertions(+), 6 deletions(-) diff --git a/asf_search/ASFSearchOptions/validator_map.py b/asf_search/ASFSearchOptions/validator_map.py index 64162bc1..6ada5cd0 100644 --- a/asf_search/ASFSearchOptions/validator_map.py +++ b/asf_search/ASFSearchOptions/validator_map.py @@ -3,7 +3,7 @@ from .validators import ( parse_string, parse_float, parse_wkt, parse_date, parse_string_list, parse_int_list, parse_int_or_range_list, - parse_float_or_range_list, + parse_float_or_range_list, parse_circle, parse_session ) @@ -32,6 +32,7 @@ def validate(key, value): 'beamMode': parse_string_list, 'beamSwath': parse_string_list, 'campaign': parse_string, + 'circle': parse_circle, 'maxDoppler': parse_float, 'minDoppler': parse_float, 'maxFaradayRotation': parse_float, diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py index d83ee105..a9c8e87a 100644 --- a/asf_search/ASFSearchOptions/validators.py +++ b/asf_search/ASFSearchOptions/validators.py @@ -189,6 +189,14 @@ def parse_wkt(value: str) -> str: raise ValueError(f'Invalid wkt: {exc}') from exc return wkt.dumps(value) +# Parse a CMR circle: +# [longitude, latitude, radius(meters)] +def parse_circle(value: List[float]) -> str: + value = parse_float_list(value) + if len(value) != 3: + raise ValueError(f'Invalid circle, must be 3 values (lat, long, radius). 
Got: {value}') + return value + # Take "requests.Session", or anything that subclasses it: def parse_session(session: Type[requests.Session]): if issubclass(type(session), requests.Session): diff --git a/asf_search/CMR/field_map.py b/asf_search/CMR/field_map.py index 561ede0a..49bdbdf2 100644 --- a/asf_search/CMR/field_map.py +++ b/asf_search/CMR/field_map.py @@ -9,6 +9,7 @@ 'beamMode': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE,{0}'}, 'beamSwath': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE_TYPE,{0}'}, 'campaign': {'key': 'attribute[]', 'fmt': 'string,MISSION_NAME,{0}'}, + 'circle': {'key': 'circle', 'fmt': '{0}'}, 'maxDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,,{0}'}, 'minDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,{0},'}, 'maxFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,,{0}'}, diff --git a/asf_search/CMR/subquery.py b/asf_search/CMR/subquery.py index f37a0ef0..84da799f 100644 --- a/asf_search/CMR/subquery.py +++ b/asf_search/CMR/subquery.py @@ -22,7 +22,7 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]: if params.get('product_list') is not None: params['product_list'] = chunk_list(params['product_list'], CMR_PAGE_SIZE) - list_param_names = ['platform', 'season', 'collections'] # these parameters will dodge the subquery system + list_param_names = ['platform', 'season', 'collections', 'circle'] # these parameters will dodge the subquery system skip_param_names = ['maxResults']# these params exist in opts, but shouldn't be passed on to subqueries at ALL params = dict([ (k, v) for k, v in params.items() if k not in skip_param_names ]) @@ -37,6 +37,7 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]: sub_queries = cartesian_product(subquery_params) final_sub_query_opts = [] + for query in sub_queries: q = dict() for p in query: diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py index 867a68e7..7db41576 100644 --- a/asf_search/CMR/translate.py +++ b/asf_search/CMR/translate.py @@ -43,6 +43,10 @@ def translate_opts(opts: ASFSearchOptions) -> list: (shapeType, shape) = wkt_to_cmr_shape(shape).split(':') dict_opts[shapeType] = shape + if "circle" in dict_opts: + # Map: to convert floats to strings before joining: + dict_opts['circle'] = ','.join(map(str, dict_opts['circle'])) + # If you need to use the temporal key: if any(key in dict_opts for key in ['start', 'end', 'season']): dict_opts = fix_date(dict_opts) @@ -83,6 +87,7 @@ def translate_opts(opts: ASFSearchOptions) -> list: cmr_opts.extend(additional_keys) + print(f"cmr_opts: {cmr_opts}") return cmr_opts diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index 156e50d5..b43de86e 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -89,13 +89,13 @@ def search_generator( while(cmr_search_after_header is not None): try: items, subquery_max_results, cmr_search_after_header = query_cmr(opts.session, url, translated_opts, subquery_count) - except (ASFSearchError, CMRIncompleteError) as e: - message = str(e) + except (ASFSearchError, CMRIncompleteError) as exc: + message = str(exc) logging.error(message) report_search_error(query, message) opts.session.headers.pop('CMR-Search-After', None) - return - + raise + opts.session.headers.update({'CMR-Search-After': cmr_search_after_header}) last_page = process_page(items, maxResults, subquery_max_results, total, subquery_count, opts) subquery_count += len(last_page) diff --git 
a/tests/yml_tests/test_ASFSearchOptions.yml b/tests/yml_tests/test_ASFSearchOptions.yml index 9408283c..e4a0f061 100644 --- a/tests/yml_tests/test_ASFSearchOptions.yml +++ b/tests/yml_tests/test_ASFSearchOptions.yml @@ -161,3 +161,9 @@ tests: expect_output: host: does-not-exist.asf.alaska.edu provider: TOTALLY NOT ASF + +- test-ASFSearchOptions - Circle works: + exception: Null + circle: [0, 0, 100] + expect_output: + circle: [0, 0, 100] diff --git a/tests/yml_tests/test_search.yml b/tests/yml_tests/test_search.yml index 97449d63..87239864 100644 --- a/tests/yml_tests/test_search.yml +++ b/tests/yml_tests/test_search.yml @@ -42,3 +42,10 @@ tests: platform: "Sentinel-1" status_code: 500 report: "Server Error: This is a Test Error" + +- test-ASFSearch-search-error 400-Error circle radius too small: + parameters: + circle: [0, 0, 2] + status_code: 400 + report: "Circle radius must be between 10 and 6000000, but was 2.0" + From f0f9172653010eb72b1e98f22f4a3d5d49040dae Mon Sep 17 00:00:00 2001 From: Cameron Showalter Date: Thu, 10 Aug 2023 12:52:59 -0800 Subject: [PATCH 05/30] Removed debug print I forgot about --- asf_search/CMR/translate.py | 1 - 1 file changed, 1 deletion(-) diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py index 7db41576..9b691671 100644 --- a/asf_search/CMR/translate.py +++ b/asf_search/CMR/translate.py @@ -87,7 +87,6 @@ def translate_opts(opts: ASFSearchOptions) -> list: cmr_opts.extend(additional_keys) - print(f"cmr_opts: {cmr_opts}") return cmr_opts From 92e12b34f724f8806c88e393780a1ecb776473c3 Mon Sep 17 00:00:00 2001 From: Cameron Showalter Date: Wed, 16 Aug 2023 20:54:04 -0800 Subject: [PATCH 06/30] Exposed get_urls method, to use in SearchAPI. Fixed bug where fileType would throw keyerror if used on a non-burst --- asf_search/ASFProduct.py | 50 +++++++++++++++++++++++++--------------- 1 file changed, 32 insertions(+), 18 deletions(-) diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py index 67694e82..dfdf7475 100644 --- a/asf_search/ASFProduct.py +++ b/asf_search/ASFProduct.py @@ -24,6 +24,11 @@ def __init__(self, args: dict = {}, session: ASFSession = ASFSession()): self.baseline = translated['baseline'] self.session = session + if 'additionalUrls' not in self.properties or len(self.properties['additionalUrls']) == 0: + self.multiple_files = False + else: + self.multiple_files = True + def __str__(self): return json.dumps(self.geojson(), indent=2, sort_keys=True) @@ -48,41 +53,50 @@ def download(self, path: str, filename: str = None, session: ASFSession = None, default_filename = self.properties['fileName'] if filename is not None: - multiple_files = ( - (fileType == FileDownloadType.ADDITIONAL_FILES and len(self.properties['additionalUrls']) > 1) - or fileType == FileDownloadType.ALL_FILES - ) - if multiple_files: - warnings.warn(f"Attempting to download multiple files for product, ignoring user provided filename argument \"{filename}\", using default.") + # Check if we should support the filename argument: + if self.multiple_files and fileType in [FileDownloadType.ADDITIONAL_FILES, FileDownloadType.ALL_FILES]: + warnings.warn(f"Attempting to download multiple files for product, ignoring user provided filename argument '{filename}', using default.") else: default_filename = filename - + if session is None: session = self.session + urls = self.get_urls(fileType=fileType) + + for url in urls: + base_filename = '.'.join(default_filename.split('.')[:-1]) + extension = url.split('.')[-1] + download_url( + url=url, + path=path, + 
filename=f"{base_filename}.{extension}", + session=session + ) + + def get_urls(self, fileType = FileDownloadType.DEFAULT_FILE) -> list: urls = [] def get_additional_urls(): - output = [] - base_filename = '.'.join(default_filename.split('.')[:-1]) + if not self.multiple_files: + ASF_LOGGER.warning(f"You attempted to download multiple files from {self.properties['sceneName']}, this product only has one file to download.") + return [] + + additional_urls = [] for url in self.properties['additionalUrls']: - extension = url.split('.')[-1] - urls.append((f"{base_filename}.{extension}", url)) - - return output + additional_urls.append(url) + return additional_urls if fileType == FileDownloadType.DEFAULT_FILE: - urls.append((default_filename, self.properties['url'])) + urls.append(self.properties['url']) elif fileType == FileDownloadType.ADDITIONAL_FILES: urls.extend(get_additional_urls()) elif fileType == FileDownloadType.ALL_FILES: - urls.append((default_filename, self.properties['url'])) + urls.append(self.properties['url']) urls.extend(get_additional_urls()) else: raise ValueError("Invalid FileDownloadType provided, the valid types are 'DEFAULT_FILE', 'ADDITIONAL_FILES', and 'ALL_FILES'") - - for filename, url in urls: - download_url(url=url, path=path, filename=filename, session=session) + return urls def stack( self, From 5972616694087f3b9d4d09e4984e2800bb7c1eb3 Mon Sep 17 00:00:00 2001 From: Cameron Showalter Date: Wed, 16 Aug 2023 20:56:29 -0800 Subject: [PATCH 07/30] Made it clear that baseline doesn't support classic search options. Throws warning and wipes them if provided --- asf_search/search/baseline_search.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py index 026137db..7fccb849 100644 --- a/asf_search/search/baseline_search.py +++ b/asf_search/search/baseline_search.py @@ -1,8 +1,10 @@ -from asf_search.baseline.stack import get_baseline_from_stack, get_default_product_type + from copy import copy +from asf_search import ASF_LOGGER +from asf_search.baseline.stack import get_baseline_from_stack, get_default_product_type from asf_search.search import search, product_search -from asf_search.ASFSearchOptions import ASFSearchOptions +from asf_search.ASFSearchOptions import ASFSearchOptions, config from asf_search.ASFSearchResults import ASFSearchResults from asf_search.ASFProduct import ASFProduct from asf_search.constants import PLATFORM @@ -30,9 +32,6 @@ def stack_from_product( :return: ASFSearchResults(dict) of search results """ - - opts = (ASFSearchOptions() if opts is None else copy(opts)) - stack_opts = get_stack_opts(reference, opts=opts) stack = search(opts=stack_opts) @@ -58,10 +57,8 @@ def stack_from_id( :return: ASFSearchResults(list) of search results """ - opts = (ASFSearchOptions() if opts is None else copy(opts)) - reference_results = product_search(product_list=reference_id, opts=opts) reference_results.raise_if_incomplete() @@ -78,7 +75,15 @@ def get_stack_opts( opts: ASFSearchOptions = None ) -> ASFSearchOptions: - stack_opts = (ASFSearchOptions() if opts is None else copy(opts)) + if opts is None: + stack_opts = ASFSearchOptions() + else: + stack_opts = copy(opts) + # If they set any search-specific keys inside the opts (exclude 'provider' and such): + if stack_opts: + ASF_LOGGER.warning(f'Baseline search options provided, but only the service config options will be used. 
[{config.config.keys()}]') + stack_opts.reset_search() + stack_opts.processingLevel = get_default_product_type(reference) if reference.properties['platform'] in precalc_platforms: @@ -88,7 +93,7 @@ def get_stack_opts( raise ASFBaselineError(f'Requested reference product needs a baseline stack ID but does not have one: {reference.properties["fileID"]}') # build a stack from scratch if it's a non-precalc dataset with state vectors - + if reference.properties['processingLevel'] == 'BURST': stack_opts.fullBurstID = reference.properties['burst']['fullBurstID'] stack_opts.polarization = [reference.properties['polarization']] From d19a24f543ab355998354577d8b2df5cb8536f55 Mon Sep 17 00:00:00 2001 From: Cameron Showalter Date: Fri, 18 Aug 2023 12:19:04 -0800 Subject: [PATCH 08/30] Updated with where this branch is currently at --- CHANGELOG.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 540279f5..e975886a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,17 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - --> +------ +## [TODO](https://github.com/asfadmin/Discovery-asf_search/compare/v6.6.2...TODO) +### Added +- Added `asf.ASFSearchOptions(circle=[lat, long, radius])` search param. Takes list of exactly 3 numbers. +- Exposed `asf.validator_map`, which given a ops search param, can be used to look up which method we're going to validate it against. +- Exposed `ASFProduct.get_urls` which returns the URL's for it's products directly. Can control which products with the `fileType` enum. +### Fixed +- Fixed bug in `ASFProduct` where asking for `asf.ADDITIONAL_FILES` on non-burst products would throw a KeyError. +### Changed +- `stack_from_id()` now raises if results are incomplete, before checking if reference was found + ------ ## [v6.6.2](https://github.com/asfadmin/Discovery-asf_search/compare/v6.6.1...v6.6.2) ### Added From 9dc3a3396323cb4277c439b6a51c893f3f35b808 Mon Sep 17 00:00:00 2001 From: Cameron Showalter Date: Thu, 31 Aug 2023 11:36:07 -0800 Subject: [PATCH 09/30] Added circle key to search, useable without opts. Also made it so generator won't throw on incomplete results, but .search directly will --- asf_search/search/search.py | 5 +++++ asf_search/search/search_generator.py | 9 ++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/asf_search/search/search.py b/asf_search/search/search.py index bf7b0529..c2967d7e 100644 --- a/asf_search/search/search.py +++ b/asf_search/search/search.py @@ -12,6 +12,7 @@ def search( beamMode: Union[str, Iterable[str]] = None, beamSwath: Union[str, Iterable[str]] = None, campaign: Union[str, Iterable[str]] = None, + circle: Tuple[float, float, float] = None, maxDoppler: float = None, minDoppler: float = None, end: Union[datetime.datetime, str] = None, @@ -51,6 +52,7 @@ def search( :param beamMode: The beam mode used to acquire the data. :param beamSwath: Encompasses a look angle and beam mode. :param campaign: For UAVSAR and AIRSAR data collections only. Search by general location, site description, or data grouping as supplied by flight agency or project. + :param circle: Search by circle defined by list of three floats: [longitude, latitude, radius in meters] :param maxDoppler: Doppler provides an indication of how much the look direction deviates from the ideal perpendicular flight direction acquisition. :param minDoppler: Doppler provides an indication of how much the look direction deviates from the ideal perpendicular flight direction acquisition. 
:param end: End date of data acquisition. Supports timestamps as well as natural language such as "3 weeks ago" @@ -95,6 +97,9 @@ def search( results.searchComplete = page.searchComplete results.searchOptions = page.searchOptions + # Raise if they didn't get everything. If you're okay with partial + # results, use asf.search_generator directly + results.raise_if_incomplete() results.sort(key=lambda p: (p.properties['stopTime'], p.properties['fileID']), reverse=True) return results diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index b43de86e..d0dd2a22 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -27,6 +27,7 @@ def search_generator( beamMode: Union[str, Iterable[str]] = None, beamSwath: Union[str, Iterable[str]] = None, campaign: Union[str, Iterable[str]] = None, + circle: Tuple[float, float, float] = None, maxDoppler: float = None, minDoppler: float = None, end: Union[datetime.datetime, str] = None, @@ -94,7 +95,13 @@ def search_generator( logging.error(message) report_search_error(query, message) opts.session.headers.pop('CMR-Search-After', None) - raise + # If it's a CMRIncompleteError, we can just stop here and return what we have + # It's up to the user to call .raise_if_incomplete() if they're using the + # generator directly. + if type(exc) == CMRIncompleteError: + return + else: + raise opts.session.headers.update({'CMR-Search-After': cmr_search_after_header}) last_page = process_page(items, maxResults, subquery_max_results, total, subquery_count, opts) From 45460e3edab3b995db3c75ad7720ce521b9d31af Mon Sep 17 00:00:00 2001 From: kim Date: Thu, 31 Aug 2023 16:18:11 -0800 Subject: [PATCH 10/30] updates download file test case. Adds _has_multiple_files() method to ASFProduct --- asf_search/ASFProduct.py | 12 +++++------- tests/ASFProduct/test_ASFProduct.py | 2 +- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py index dfdf7475..04de72df 100644 --- a/asf_search/ASFProduct.py +++ b/asf_search/ASFProduct.py @@ -24,11 +24,6 @@ def __init__(self, args: dict = {}, session: ASFSession = ASFSession()): self.baseline = translated['baseline'] self.session = session - if 'additionalUrls' not in self.properties or len(self.properties['additionalUrls']) == 0: - self.multiple_files = False - else: - self.multiple_files = True - def __str__(self): return json.dumps(self.geojson(), indent=2, sort_keys=True) @@ -54,7 +49,7 @@ def download(self, path: str, filename: str = None, session: ASFSession = None, if filename is not None: # Check if we should support the filename argument: - if self.multiple_files and fileType in [FileDownloadType.ADDITIONAL_FILES, FileDownloadType.ALL_FILES]: + if self._has_multiple_files() and fileType in [FileDownloadType.ADDITIONAL_FILES, FileDownloadType.ALL_FILES]: warnings.warn(f"Attempting to download multiple files for product, ignoring user provided filename argument '{filename}', using default.") else: default_filename = filename @@ -78,7 +73,7 @@ def get_urls(self, fileType = FileDownloadType.DEFAULT_FILE) -> list: urls = [] def get_additional_urls(): - if not self.multiple_files: + if not self._has_multiple_files(): ASF_LOGGER.warning(f"You attempted to download multiple files from {self.properties['sceneName']}, this product only has one file to download.") return [] @@ -148,3 +143,6 @@ def remotezip(self, session: ASFSession) -> RemoteZip: from .download.download import remotezip return 
remotezip(self.properties['url'], session=session) + + def _has_multiple_files(self): + return 'additionalUrls' in self.properties and len(self.properties['additionalUrls']) > 0 diff --git a/tests/ASFProduct/test_ASFProduct.py b/tests/ASFProduct/test_ASFProduct.py index 22102d9c..e5cfd9e1 100644 --- a/tests/ASFProduct/test_ASFProduct.py +++ b/tests/ASFProduct/test_ASFProduct.py @@ -78,7 +78,7 @@ def run_test_ASFProduct_download(reference, filename, filetype, additional_urls) with patch('builtins.open', unittest.mock.mock_open()) as m: if filename != None and ( (filetype == FileDownloadType.ADDITIONAL_FILES and len(additional_urls) > 1) - or filetype == FileDownloadType.ALL_FILES + or (filetype == FileDownloadType.ALL_FILES and len(additional_urls) > 0) ): with pytest.warns(Warning): product.download('./', filename=filename, fileType=filetype) From e4f45ed9d45a7be5c5d41b495df4581178b563cd Mon Sep 17 00:00:00 2001 From: kim Date: Wed, 7 Feb 2024 16:32:48 -0900 Subject: [PATCH 11/30] removes missing import in baseline_searhc.py --- asf_search/search/baseline_search.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py index 07c6a0c2..4d7da17f 100644 --- a/asf_search/search/baseline_search.py +++ b/asf_search/search/baseline_search.py @@ -3,10 +3,8 @@ from asf_search import ASF_LOGGER from copy import copy -from asf_search import ASF_LOGGER -from asf_search.baseline.stack import get_baseline_from_stack, get_default_product_type from asf_search.search import search, product_search -from asf_search.ASFSearchOptions import ASFSearchOptions, config +from asf_search.ASFSearchOptions import ASFSearchOptions from asf_search.ASFSearchResults import ASFSearchResults from asf_search import ASFProduct from asf_search.constants import PLATFORM From ec570e7cf410f7c1c8abf937f803266c03c37654 Mon Sep 17 00:00:00 2001 From: kim Date: Mon, 12 Feb 2024 15:03:34 -0900 Subject: [PATCH 12/30] adds some checks for unavailable fields in jsonlite --- asf_search/export/jsonlite.py | 13 +++++++++++-- asf_search/export/jsonlite2.py | 6 +++++- asf_search/search/search.py | 1 + asf_search/search/search_generator.py | 8 +++++++- tests/Search/test_search.py | 2 +- 5 files changed, 25 insertions(+), 5 deletions(-) diff --git a/asf_search/export/jsonlite.py b/asf_search/export/jsonlite.py index 8f581cfd..56848b35 100644 --- a/asf_search/export/jsonlite.py +++ b/asf_search/export/jsonlite.py @@ -129,7 +129,7 @@ def getItem(self, p): pass try: - p['frameNumber'] = int(p['frameNumber']) + p['frameNumber'] = int(p.get('frameNumber')) except TypeError: pass @@ -176,13 +176,22 @@ def getItem(self, p): if result[key] in [ 'NA', 'NULL']: result[key] = None - if 'temporalBaseline' in p.keys() or 'perpendicularBaseline' in p.keys(): + if 'temporalBaseline' in p.keys(): result['temporalBaseline'] = p['temporalBaseline'] + if 'perpendicularBaseline' in p.keys(): result['perpendicularBaseline'] = p['perpendicularBaseline'] if p.get('processingLevel') == 'BURST': # is a burst product result['burst'] = p['burst'] + if p.get('operaBurstID') is not None or result['productID'].startswith('OPERA'): + result['opera'] = { + 'operaBurstID': p.get('operaBurstID'), + 'additionalUrls': p.get('additionalUrls'), + } + if p.get('validityStartDate'): + result['opera']['validityStartDate'] = p.get('validityStartDate') + return result def getOutputType(self) -> str: diff --git a/asf_search/export/jsonlite2.py b/asf_search/export/jsonlite2.py index 
5cd936b2..125363df 100644 --- a/asf_search/export/jsonlite2.py +++ b/asf_search/export/jsonlite2.py @@ -54,12 +54,16 @@ def getItem(self, p): 'pge': p['pgeVersion'] } - if 'temporalBaseline' in p.keys() or 'perpendicularBaseline' in p.keys(): + if 'temporalBaseline' in p.keys(): result['tb'] = p['temporalBaseline'] + if 'perpendicularBaseline' in p.keys(): result['pb'] = p['perpendicularBaseline'] if p.get('burst') is not None: # is a burst product result['s1b'] = p['burst'] + + if p.get('opera') is not None: + result['s1o'] = p['opera'] return result diff --git a/asf_search/search/search.py b/asf_search/search/search.py index e91eac0c..4627ebbb 100644 --- a/asf_search/search/search.py +++ b/asf_search/search/search.py @@ -99,6 +99,7 @@ def search( results.searchComplete = page.searchComplete results.searchOptions = page.searchOptions + results.raise_if_incomplete() results.sort(key=lambda p: p.get_sort_keys(), reverse=True) return results diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index 59d8e5f1..14052ee2 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -103,7 +103,13 @@ def search_generator( logging.error(message) report_search_error(query, message) opts.session.headers.pop('CMR-Search-After', None) - return + # If it's a CMRIncompleteError, we can just stop here and return what we have + # It's up to the user to call .raise_if_incomplete() if they're using the + # generator directly. + if type(exc) == CMRIncompleteError: + return + else: + raise opts.session.headers.update({'CMR-Search-After': cmr_search_after_header}) last_page = process_page(items, maxResults, subquery_max_results, total, subquery_count, opts) diff --git a/tests/Search/test_search.py b/tests/Search/test_search.py index 7ef9df95..a95b6928 100644 --- a/tests/Search/test_search.py +++ b/tests/Search/test_search.py @@ -85,7 +85,7 @@ def custom_matcher(request: requests.Request): results = search(opts=searchOptions) assert results is not None - assert 0 < len(results) <= INTERNAL.CMR_PAGE_SIZE + assert 0 <= len(results) <= INTERNAL.CMR_PAGE_SIZE with raises(ASFSearchError): results.raise_if_incomplete() From 272b3cafd2002259c5bf376a0451ba47715762b4 Mon Sep 17 00:00:00 2001 From: kim Date: Mon, 26 Feb 2024 17:06:38 -0900 Subject: [PATCH 13/30] updated to searchapi baseline tests --- asf_search/ASFStackableProduct.py | 7 +++++++ asf_search/Products/S1Product.py | 4 ++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/asf_search/ASFStackableProduct.py b/asf_search/ASFStackableProduct.py index 60c3830e..b85f0626 100644 --- a/asf_search/ASFStackableProduct.py +++ b/asf_search/ASFStackableProduct.py @@ -73,3 +73,10 @@ def get_default_baseline_product_type() -> Union[str, None]: Returns the product type to search for when building a baseline stack. 
""" return None + + def has_baseline(self) -> bool: + baseline = self.get_baseline_calc_properties() + + return ( + baseline is not None + ) \ No newline at end of file diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py index 25282de7..329e37de 100644 --- a/asf_search/Products/S1Product.py +++ b/asf_search/Products/S1Product.py @@ -30,10 +30,10 @@ class S1Product(ASFStackableProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - if self._has_baseline(): + if self.has_baseline(): self.baseline = self.get_baseline_calc_properties() - def _has_baseline(self) -> bool: + def has_baseline(self) -> bool: baseline = self.get_baseline_calc_properties() return ( From a3d1221843fcffa105904027beb822a8e351cdee Mon Sep 17 00:00:00 2001 From: kim Date: Wed, 28 Feb 2024 16:23:50 -0900 Subject: [PATCH 14/30] line string work --- asf_search/ASFSearchOptions/validator_map.py | 5 +++-- asf_search/ASFSearchOptions/validators.py | 8 ++++++++ asf_search/CMR/subquery.py | 2 +- asf_search/CMR/translate.py | 3 +++ asf_search/Products/ALOSProduct.py | 1 + asf_search/search/search.py | 1 + asf_search/search/search_generator.py | 1 + 7 files changed, 18 insertions(+), 3 deletions(-) diff --git a/asf_search/ASFSearchOptions/validator_map.py b/asf_search/ASFSearchOptions/validator_map.py index b45baa3a..89b12aec 100644 --- a/asf_search/ASFSearchOptions/validator_map.py +++ b/asf_search/ASFSearchOptions/validator_map.py @@ -3,7 +3,7 @@ from .validators import ( parse_string, parse_float, parse_wkt, parse_date, parse_string_list, parse_int_list, parse_int_or_range_list, - parse_float_or_range_list, parse_circle, + parse_float_or_range_list, parse_circle, parse_linestring, parse_session ) @@ -32,7 +32,8 @@ def validate(key, value): 'beamMode': parse_string_list, 'beamSwath': parse_string_list, 'campaign': parse_string, - 'circle': parse_circle, + 'circle': parse_circle, + 'linestring': parse_linestring, 'maxDoppler': parse_float, 'minDoppler': parse_float, 'maxFaradayRotation': parse_float, diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py index 5208db9c..65d0c857 100644 --- a/asf_search/ASFSearchOptions/validators.py +++ b/asf_search/ASFSearchOptions/validators.py @@ -201,6 +201,14 @@ def parse_circle(value: List[float]) -> str: raise ValueError(f'Invalid circle, must be 3 values (lat, long, radius). Got: {value}') return value +# Parse a CMR linestring: +# [longitude, latitude, longitude, latitude, ...] +def parse_linestring(value: List[float]) -> str: + value = parse_float_list(value) + if len(value) % 2 != 0: + raise ValueError(f'Invalid linestring, must be values of format (lat, long, lat, long, ...). 
Got: {value}') + return value + # Take "requests.Session", or anything that subclasses it: def parse_session(session: Type[requests.Session]): if issubclass(type(session), requests.Session): diff --git a/asf_search/CMR/subquery.py b/asf_search/CMR/subquery.py index f5fb5d08..cab51f94 100644 --- a/asf_search/CMR/subquery.py +++ b/asf_search/CMR/subquery.py @@ -22,7 +22,7 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]: if params.get(chunked_key) is not None: params[chunked_key] = chunk_list(params[chunked_key], CMR_PAGE_SIZE) - list_param_names = ['platform', 'season', 'collections', 'circle', 'dataset'] # these parameters will dodge the subquery system + list_param_names = ['platform', 'season', 'collections', 'circle', 'linestring', 'dataset'] # these parameters will dodge the subquery system skip_param_names = ['maxResults']# these params exist in opts, but shouldn't be passed on to subqueries at ALL collections, aliased_keywords = get_keyword_concept_ids(params, opts.collectionAlias) diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py index 9944171e..af6e3d26 100644 --- a/asf_search/CMR/translate.py +++ b/asf_search/CMR/translate.py @@ -49,6 +49,9 @@ def translate_opts(opts: ASFSearchOptions) -> List: # Map: to convert floats to strings before joining: dict_opts['circle'] = ','.join(map(str, dict_opts['circle'])) + # if "linestring" in dict_opts: + # dict_opts['linestring'] = ','.join(map(str, dict_opts['linestring'])) + # If you need to use the temporal key: if any(key in dict_opts for key in ['start', 'end', 'season']): dict_opts = fix_date(dict_opts) diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py index 9f31011b..d90902f7 100644 --- a/asf_search/Products/ALOSProduct.py +++ b/asf_search/Products/ALOSProduct.py @@ -16,6 +16,7 @@ class ALOSProduct(ASFStackableProduct): 'offNadirAngle': {'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0], 'cast': try_parse_float}, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, + 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): diff --git a/asf_search/search/search.py b/asf_search/search/search.py index 4627ebbb..51559135 100644 --- a/asf_search/search/search.py +++ b/asf_search/search/search.py @@ -13,6 +13,7 @@ def search( beamSwath: Union[str, Sequence[str]] = None, campaign: Union[str, Sequence[str]] = None, circle: Tuple[float, float, float] = None, + linestring: Sequence[float] = None, maxDoppler: float = None, minDoppler: float = None, end: Union[datetime.datetime, str] = None, diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index 14052ee2..b3e4c9d8 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -31,6 +31,7 @@ def search_generator( beamSwath: Union[str, Sequence[str]] = None, campaign: Union[str, Sequence[str]] = None, circle: Tuple[float, float, float] = None, + linestring: Sequence[float] = None, maxDoppler: float = None, minDoppler: float = None, end: Union[datetime.datetime, str] = None, From 3bb63e082f70d2e5a418b20fa01b75736a5a0aee Mon Sep 17 00:00:00 2001 From: kim Date: Thu, 29 Feb 2024 13:32:25 -0900 Subject: [PATCH 15/30] moves linestring repair before intersectsWith conversion --- 
asf_search/CMR/translate.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py index af6e3d26..65efefea 100644 --- a/asf_search/CMR/translate.py +++ b/asf_search/CMR/translate.py @@ -8,7 +8,7 @@ from shapely.geometry import Polygon from shapely.geometry.base import BaseGeometry from .field_map import field_map -from .datasets import dataset_collections, collections_per_platform +from .datasets import collections_per_platform import logging @@ -24,6 +24,14 @@ def translate_opts(opts: ASFSearchOptions) -> List: if escape_commas in dict_opts: dict_opts[escape_commas] = dict_opts[escape_commas].replace(",", "\,") + + if "linestring" in dict_opts: + dict_opts['linestring'] = ','.join(map(str, dict_opts['linestring'])) + + if "circle" in dict_opts: + # Map: to convert floats to strings before joining: + dict_opts['circle'] = ','.join(map(str, dict_opts['circle'])) + # Special case to unravel WKT field a little for compatibility if "intersectsWith" in dict_opts: shape = wkt.loads(dict_opts.pop('intersectsWith', None)) @@ -45,12 +53,6 @@ def translate_opts(opts: ASFSearchOptions) -> List: (shapeType, shape) = wkt_to_cmr_shape(shape).split(':') dict_opts[shapeType] = shape - if "circle" in dict_opts: - # Map: to convert floats to strings before joining: - dict_opts['circle'] = ','.join(map(str, dict_opts['circle'])) - - # if "linestring" in dict_opts: - # dict_opts['linestring'] = ','.join(map(str, dict_opts['linestring'])) # If you need to use the temporal key: if any(key in dict_opts for key in ['start', 'end', 'season']): From 495faa9ee51770d97a6df1c71902cb205c4a5764 Mon Sep 17 00:00:00 2001 From: kim Date: Tue, 5 Mar 2024 11:34:50 -0900 Subject: [PATCH 16/30] bugfix: fixes range params, changes old exception text, adds non-polygon shapes --- asf_search/ASFSearchOptions/validator_map.py | 9 ++++++- asf_search/ASFSearchOptions/validators.py | 19 ++++++++++--- asf_search/CMR/subquery.py | 28 +++++++++++++++++--- asf_search/CMR/translate.py | 27 +++++++++++++------ asf_search/exceptions.py | 9 ++----- asf_search/search/search_generator.py | 3 ++- 6 files changed, 72 insertions(+), 23 deletions(-) diff --git a/asf_search/ASFSearchOptions/validator_map.py b/asf_search/ASFSearchOptions/validator_map.py index 0830fbde..fc7a2a8d 100644 --- a/asf_search/ASFSearchOptions/validator_map.py +++ b/asf_search/ASFSearchOptions/validator_map.py @@ -4,7 +4,7 @@ parse_string, parse_float, parse_wkt, parse_date, parse_string_list, parse_int_list, parse_int_or_range_list, parse_float_or_range_list, parse_circle, parse_linestring, - parse_cmr_keywords_list, + parse_cmr_keywords_list, parse_point, parse_coord_string, parse_session ) @@ -35,10 +35,17 @@ def validate(key, value): 'campaign': parse_string, 'circle': parse_circle, 'linestring': parse_linestring, + 'point': parse_point, + 'maxBaselinePerp': parse_float, + 'minBaselinePerp': parse_float, + 'maxInsarStackSize': parse_float, + 'minInsarStackSize': parse_float, 'maxDoppler': parse_float, 'minDoppler': parse_float, 'maxFaradayRotation': parse_float, 'minFaradayRotation': parse_float, + 'maxInsarStackSize': parse_int_or_range_list, + 'minInsarStackSize': parse_int_or_range_list, 'flightDirection': parse_string, 'flightLine': parse_string, 'frame': parse_int_or_range_list, diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py index 05518952..5a84560e 100644 --- a/asf_search/ASFSearchOptions/validators.py +++ 
b/asf_search/ASFSearchOptions/validators.py @@ -52,7 +52,7 @@ def parse_date(value: Union[str, datetime.datetime]) -> str: date = dateparser.parse(str(value)) if date is None: raise ValueError(f"Invalid date: '{value}'.") - return str(value) + return str(date.date()) def parse_range(value: Tuple[number, number], h: Callable[[number], number]) -> Tuple[number, number]: @@ -213,7 +213,7 @@ def parse_wkt(value: str) -> str: # Parse a CMR circle: # [longitude, latitude, radius(meters)] -def parse_circle(value: List[float]) -> str: +def parse_circle(value: List[float]) -> List[float]: value = parse_float_list(value) if len(value) != 3: raise ValueError(f'Invalid circle, must be 3 values (lat, long, radius). Got: {value}') @@ -221,12 +221,25 @@ def parse_circle(value: List[float]) -> str: # Parse a CMR linestring: # [longitude, latitude, longitude, latitude, ...] -def parse_linestring(value: List[float]) -> str: +def parse_linestring(value: List[float]) -> List[float]: value = parse_float_list(value) if len(value) % 2 != 0: raise ValueError(f'Invalid linestring, must be values of format (lat, long, lat, long, ...). Got: {value}') return value +def parse_point(value: List[float]) -> List[float]: + value = parse_float_list(value) + if len(value) != 2: + raise ValueError(f'Invalid point, must be values of format (lat, long). Got: {value}') + return value + +# Parse and validate a coordinate string +def parse_coord_string(value: List): + value = parse_float_list(value) + if len(value) % 2 != 0: + raise ValueError(f'Invalid coordinate string, must be values of format (lat, long, lat, long, ...). Got: {value}') + return value + # Take "requests.Session", or anything that subclasses it: def parse_session(session: Type[requests.Session]): if issubclass(type(session), requests.Session): diff --git a/asf_search/CMR/subquery.py b/asf_search/CMR/subquery.py index cab51f94..8b2c52c4 100644 --- a/asf_search/CMR/subquery.py +++ b/asf_search/CMR/subquery.py @@ -1,10 +1,10 @@ -from typing import List, Optional, Tuple +from typing import List, Tuple import itertools from copy import copy from asf_search.ASFSearchOptions import ASFSearchOptions from asf_search.constants import CMR_PAGE_SIZE - +from asf_search.CMR.field_map import field_map from asf_search.CMR.datasets import collections_by_processing_level, collections_per_platform, dataset_collections, get_concept_id_alias, get_dataset_concept_ids from numpy import intersect1d, union1d @@ -22,7 +22,7 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]: if params.get(chunked_key) is not None: params[chunked_key] = chunk_list(params[chunked_key], CMR_PAGE_SIZE) - list_param_names = ['platform', 'season', 'collections', 'circle', 'linestring', 'dataset'] # these parameters will dodge the subquery system + list_param_names = ['platform', 'season', 'collections', 'circle', 'linestring', 'point', 'dataset'] # these parameters will dodge the subquery system skip_param_names = ['maxResults']# these params exist in opts, but shouldn't be passed on to subqueries at ALL collections, aliased_keywords = get_keyword_concept_ids(params, opts.collectionAlias) @@ -137,6 +137,28 @@ def format_query_params(params) -> List[List[dict]]: def translate_param(param_name, param_val) -> List[dict]: + # param_list = [] + + # cmr_input_map = field_map + + # param_input_map = cmr_input_map[param_name] + # cmr_param = param_input_map['key'] + # cmr_format_str = param_input_map['fmt'] + + # if not isinstance(param_val, list): + # param_val = [param_val] + + # for l in 
param_val: + # format_val = l + + # if isinstance(l, list): + # format_val = ','.join([f'{t}' for t in l]) + + # param_list.append({ + # cmr_param: cmr_format_str.format(format_val) + # }) + + # return param_list param_list = [] if not isinstance(param_val, list): diff --git a/asf_search/CMR/translate.py b/asf_search/CMR/translate.py index 20a6e198..8d736379 100644 --- a/asf_search/CMR/translate.py +++ b/asf_search/CMR/translate.py @@ -24,13 +24,7 @@ def translate_opts(opts: ASFSearchOptions) -> List: if escape_commas in dict_opts: dict_opts[escape_commas] = dict_opts[escape_commas].replace(",", "\,") - - if "linestring" in dict_opts: - dict_opts['linestring'] = ','.join(map(str, dict_opts['linestring'])) - - if "circle" in dict_opts: - # Map: to convert floats to strings before joining: - dict_opts['circle'] = ','.join(map(str, dict_opts['circle'])) + dict_opts = fix_cmr_shapes(dict_opts) # Special case to unravel WKT field a little for compatibility if "intersectsWith" in dict_opts: @@ -58,6 +52,8 @@ def translate_opts(opts: ASFSearchOptions) -> List: if any(key in dict_opts for key in ['start', 'end', 'season']): dict_opts = fix_date(dict_opts) + dict_opts = fix_range_params(dict_opts) + # convert the above parameters to a list of key/value tuples cmr_opts = [] @@ -103,6 +99,14 @@ def translate_opts(opts: ASFSearchOptions) -> List: return cmr_opts +def fix_cmr_shapes(fixed_params: Dict[str, Any]) -> Dict[str, Any]: + """Fixes raw CMR lon lat coord shapes""" + for param in ['point', 'linestring', 'circle']: + if param in fixed_params: + fixed_params[param] = ','.join(map(str, fixed_params[param])) + + return fixed_params + def should_use_asf_frame(cmr_opts): asf_frame_platforms = ['SENTINEL-1A', 'SENTINEL-1B', 'ALOS'] @@ -163,7 +167,7 @@ def try_parse_float(value: str) -> Optional[float]: return float(value) -def fix_date(fixed_params: Dict[str, Any]): +def fix_date(fixed_params: Dict[str, Any]) -> Dict[str, Any]: if 'start' in fixed_params or 'end' in fixed_params or 'season' in fixed_params: fixed_params["start"] = fixed_params["start"] if "start" in fixed_params else "1978-01-01T00:00:00Z" fixed_params["end"] = fixed_params["end"] if "end" in fixed_params else datetime.utcnow().isoformat() @@ -178,6 +182,13 @@ def fix_date(fixed_params: Dict[str, Any]): return fixed_params +def fix_range_params(fixed_params: Dict[str, Any]) -> Dict[str, Any]: + """Converts ranges to comma separated strings""" + for param in ['offNadirAngle', 'relativeOrbit', 'absoluteOrbit', 'frame', 'asfFrame']: + if param in fixed_params.keys() and isinstance(fixed_params[param], list): + fixed_params[param] = ','.join([str(val) for val in fixed_params[param]]) + + return fixed_params def should_use_bbox(shape: BaseGeometry): """ diff --git a/asf_search/exceptions.py b/asf_search/exceptions.py index 8468af0e..77f77aea 100644 --- a/asf_search/exceptions.py +++ b/asf_search/exceptions.py @@ -7,16 +7,11 @@ class ASFSearchError(ASFError): class ASFSearch4xxError(ASFSearchError): - """Raise when SearchAPI returns a 4xx error""" + """Raise when CMR returns a 4xx error""" class ASFSearch5xxError(ASFSearchError): - """Raise when SearchAPI returns a 5xx error""" - - -class ASFServerError(ASFSearchError): - """Raise when SearchAPI returns an unknown error""" - + """Raise when CMR returns a 5xx error""" class ASFBaselineError(ASFSearchError): """Raise when baseline related errors occur""" diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index 9e3cf455..5589bec8 100644 --- 
a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -8,7 +8,7 @@ import dateparser import warnings -from asf_search import __version__ +from asf_search import ASF_LOGGER, __version__ from asf_search.ASFSearchResults import ASFSearchResults from asf_search.ASFSearchOptions import ASFSearchOptions @@ -93,6 +93,7 @@ def search_generator( queries = build_subqueries(opts) for query in queries: translated_opts = translate_opts(query) + ASF_LOGGER.warning(f"TRANSLATED PARAMS: {translated_opts}") cmr_search_after_header = "" subquery_count = 0 From 1a7214890a5639a2d61afb2b331387a298a406bb Mon Sep 17 00:00:00 2001 From: kim Date: Tue, 5 Mar 2024 12:17:46 -0900 Subject: [PATCH 17/30] update test case logic, remove searchapi output tests --- tests/ASFProduct/test_ASFProduct.py | 2 +- tests/ASFSession/test_ASFSession.py | 6 +++--- tests/Search/test_search.py | 9 ++------- tests/download/test_download.py | 14 +++++++------- tests/pytest-config.yml | 8 ++++---- tests/pytest-managers.py | 16 ++++++++-------- tests/yml_tests/test_ASFSearchResults.yml | 20 ++++++++++---------- 7 files changed, 35 insertions(+), 40 deletions(-) diff --git a/tests/ASFProduct/test_ASFProduct.py b/tests/ASFProduct/test_ASFProduct.py index 42214a2f..efa62c14 100644 --- a/tests/ASFProduct/test_ASFProduct.py +++ b/tests/ASFProduct/test_ASFProduct.py @@ -70,7 +70,7 @@ def run_test_product_get_stack_options(reference, options): def run_test_ASFProduct_download(reference, filename, filetype, additional_urls): product = as_ASFProduct(reference, ASFSession()) product.properties['additionalUrls'] = additional_urls - with patch('asf_search.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 200 mock_get.return_value = resp diff --git a/tests/ASFSession/test_ASFSession.py b/tests/ASFSession/test_ASFSession.py index 21382372..ff28abb3 100644 --- a/tests/ASFSession/test_ASFSession.py +++ b/tests/ASFSession/test_ASFSession.py @@ -16,7 +16,7 @@ def run_auth_with_creds(username: str, password: str): def run_auth_with_token(token: str): session = ASFSession() - with patch('asf_search.ASFSession.get') as mock_token_session: + with patch('asf_search.ASFSession.ASFSession.get') as mock_token_session: if not token.startswith('Bearer EDL'): mock_token_session.return_value.status_code = 400 session.auth_with_token(token) @@ -43,7 +43,7 @@ def run_test_asf_session_rebuild_auth( session = ASFSession() - with patch('asf_search.ASFSession.get') as mock_token_session: + with patch('asf_search.ASFSession.ASFSession.get') as mock_token_session: mock_token_session.return_value.status_code = 200 session.auth_with_token("bad_token") @@ -57,7 +57,7 @@ def run_test_asf_session_rebuild_auth( response.request.url = response_domain response.headers.update({'Authorization': 'Bearer fakeToken'}) - with patch('asf_search.ASFSession._get_domain') as hostname_patch: + with patch('asf_search.ASFSession.ASFSession._get_domain') as hostname_patch: hostname_patch.side_effect = [original_domain, response_domain] session.rebuild_auth(req, response) diff --git a/tests/Search/test_search.py b/tests/Search/test_search.py index a95b6928..c2ce45ee 100644 --- a/tests/Search/test_search.py +++ b/tests/Search/test_search.py @@ -60,10 +60,8 @@ def run_test_search_http_error(search_parameters, status_code: Number, report: s m.register_uri('POST', f"https://{INTERNAL.CMR_HOST}{INTERNAL.CMR_GRANULE_PATH}", status_code=status_code, json={'errors': {'report': 
report}}) m.register_uri('POST', f"https://search-error-report.asf.alaska.edu/", real_http=True) searchOptions = ASFSearchOptions(**search_parameters) - results = search(opts=searchOptions) - assert len(results) == 0 with raises(ASFSearchError): - results.raise_if_incomplete() + results = search(opts=searchOptions) return # If we're not doing an empty search we want to fire off one real query to CMR, then interrupt it with an error @@ -82,12 +80,9 @@ def custom_matcher(request: requests.Request): search_parameters['maxResults'] = INTERNAL.CMR_PAGE_SIZE + 1 searchOptions = ASFSearchOptions(**search_parameters) - results = search(opts=searchOptions) - assert results is not None - assert 0 <= len(results) <= INTERNAL.CMR_PAGE_SIZE with raises(ASFSearchError): - results.raise_if_incomplete() + results = search(opts=searchOptions) def run_test_dataset_search(datasets: List): if any(dataset for dataset in datasets if dataset_collections.get(dataset) is None): diff --git a/tests/download/test_download.py b/tests/download/test_download.py index 794224b1..0a3979ee 100644 --- a/tests/download/test_download.py +++ b/tests/download/test_download.py @@ -8,7 +8,7 @@ from asf_search.download.download import download_url def run_test_download_url_auth_error(url, path, filename): - with patch('asf_search.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 401 mock_get.return_value = resp @@ -17,17 +17,17 @@ def run_test_download_url_auth_error(url, path, filename): with pytest.raises(ASFDownloadError): download_url(url, path, filename) - with patch('asf_search.download.os.path.isdir') as path_mock: + with patch('os.path.isdir') as path_mock: path_mock.return_value = True if url == "urlError": - with patch('asf_search.download.os.path.isfile') as isfile_mock: + with patch('os.path.isfile') as isfile_mock: isfile_mock.return_value = False with pytest.raises(ASFAuthenticationError): download_url(url, path, filename) - with patch('asf_search.download.os.path.isfile') as isfile_mock: + with patch('os.path.isfile') as isfile_mock: isfile_mock.return_value = True with pytest.warns(Warning): @@ -35,13 +35,13 @@ def run_test_download_url_auth_error(url, path, filename): def run_test_download_url(url, path, filename): if filename == 'BURST': - with patch('asf_search.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 202 resp.headers.update({'content-type': 'application/json'}) mock_get.return_value = resp - with patch('asf_search.ASFSession.get') as mock_get_burst: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get_burst: resp_2 = requests.Response() resp_2.status_code = 200 resp_2.headers.update({'content-type': 'image/tiff'}) @@ -51,7 +51,7 @@ def run_test_download_url(url, path, filename): with patch('builtins.open', unittest.mock.mock_open()) as m: download_url(url, path, filename) else: - with patch('asf_search.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 200 mock_get.return_value = resp diff --git a/tests/pytest-config.yml b/tests/pytest-config.yml index 0407bd6b..9087d87b 100644 --- a/tests/pytest-config.yml +++ b/tests/pytest-config.yml @@ -188,10 +188,10 @@ test_types: required_in_title: serialization method: test_serialization -- For running ASFSearchOptions tests: - required_in_title: ASFSearchResults-format - 
required_keys: results - method: test_output_format +# - For running ASFSearchOptions tests: +# required_in_title: ASFSearchResults-format +# required_keys: results +# method: test_output_format - For running search-api keyword-collection aliasing tests: required_in_title: test-aliasing-search-against-api diff --git a/tests/pytest-managers.py b/tests/pytest-managers.py index 1cb9df47..b6619f3c 100644 --- a/tests/pytest-managers.py +++ b/tests/pytest-managers.py @@ -82,7 +82,7 @@ def test_ASFSession_Error(**args) -> None: test_info = args["test_info"] username = test_info["username"] password = test_info["password"] - with patch('asf_search.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.ASFSession.get') as mock_get: mock_get.return_value = "Error" with raises(ASFAuthenticationError): @@ -457,15 +457,15 @@ def safe_load_tuple(param): return param -def test_output_format(**args) -> None: - test_info = args['test_info'] +# def test_output_format(**args) -> None: +# test_info = args['test_info'] - products = get_resource(test_info['results']) - if not isinstance(products, List): - products = [products] - results = ASFSearchResults([as_ASFProduct({'meta': product['meta'], 'umm': product['umm']}, ASFSession()) for product in products]) +# products = get_resource(test_info['results']) +# if not isinstance(products, List): +# products = [products] +# results = ASFSearchResults([as_ASFProduct({'meta': product['meta'], 'umm': product['umm']}, ASFSession()) for product in products]) - run_test_output_format(results) +# run_test_output_format(results) def test_keyword_aliasing_results(**args) -> None: test_info = args['test_info'] diff --git a/tests/yml_tests/test_ASFSearchResults.yml b/tests/yml_tests/test_ASFSearchResults.yml index 2fd49568..046d2df5 100644 --- a/tests/yml_tests/test_ASFSearchResults.yml +++ b/tests/yml_tests/test_ASFSearchResults.yml @@ -29,17 +29,17 @@ tests: - Test ASFSearchResults_intersection antimeridian: wkt: POLYGON((-181 -89, -179 -89, -179 89, -181 89, -181 -89)) -- Test ASFSearchResults-format Fairbanks slc: - results: [Fairbanks_SLC.yml] +# - Test ASFSearchResults-format Fairbanks slc: +# results: [Fairbanks_SLC.yml] -- Test ASFSearchResults-format Fairbanks S1 Stack: - results: Fairbanks_S1_stack.yml +# - Test ASFSearchResults-format Fairbanks S1 Stack: +# results: Fairbanks_S1_stack.yml -- Test ASFSearchResults-format Alos: - results: Alos_response.yml +# - Test ASFSearchResults-format Alos: +# results: Alos_response.yml -- Test ASFSearchResults-format L1: - results: Fairbanks_L1.yml +# - Test ASFSearchResults-format L1: +# results: Fairbanks_L1.yml -- Test ASFSearchResults-format ERS Stack: - results: Fairbanks_ers_stack.yml +# - Test ASFSearchResults-format ERS Stack: +# results: Fairbanks_ers_stack.yml From 47b46f7750c44a57861ad1ed37eed411bc8e8a74 Mon Sep 17 00:00:00 2001 From: kim Date: Tue, 5 Mar 2024 14:10:10 -0900 Subject: [PATCH 18/30] remove "ASFSession.ASFSession" in patching test cases --- tests/ASFProduct/test_ASFProduct.py | 2 +- tests/ASFSession/test_ASFSession.py | 6 +++--- tests/download/test_download.py | 8 ++++---- tests/pytest-managers.py | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/ASFProduct/test_ASFProduct.py b/tests/ASFProduct/test_ASFProduct.py index efa62c14..42214a2f 100644 --- a/tests/ASFProduct/test_ASFProduct.py +++ b/tests/ASFProduct/test_ASFProduct.py @@ -70,7 +70,7 @@ def run_test_product_get_stack_options(reference, options): def run_test_ASFProduct_download(reference, filename, filetype, 
additional_urls): product = as_ASFProduct(reference, ASFSession()) product.properties['additionalUrls'] = additional_urls - with patch('asf_search.ASFSession.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 200 mock_get.return_value = resp diff --git a/tests/ASFSession/test_ASFSession.py b/tests/ASFSession/test_ASFSession.py index ff28abb3..21382372 100644 --- a/tests/ASFSession/test_ASFSession.py +++ b/tests/ASFSession/test_ASFSession.py @@ -16,7 +16,7 @@ def run_auth_with_creds(username: str, password: str): def run_auth_with_token(token: str): session = ASFSession() - with patch('asf_search.ASFSession.ASFSession.get') as mock_token_session: + with patch('asf_search.ASFSession.get') as mock_token_session: if not token.startswith('Bearer EDL'): mock_token_session.return_value.status_code = 400 session.auth_with_token(token) @@ -43,7 +43,7 @@ def run_test_asf_session_rebuild_auth( session = ASFSession() - with patch('asf_search.ASFSession.ASFSession.get') as mock_token_session: + with patch('asf_search.ASFSession.get') as mock_token_session: mock_token_session.return_value.status_code = 200 session.auth_with_token("bad_token") @@ -57,7 +57,7 @@ def run_test_asf_session_rebuild_auth( response.request.url = response_domain response.headers.update({'Authorization': 'Bearer fakeToken'}) - with patch('asf_search.ASFSession.ASFSession._get_domain') as hostname_patch: + with patch('asf_search.ASFSession._get_domain') as hostname_patch: hostname_patch.side_effect = [original_domain, response_domain] session.rebuild_auth(req, response) diff --git a/tests/download/test_download.py b/tests/download/test_download.py index 0a3979ee..a89cd872 100644 --- a/tests/download/test_download.py +++ b/tests/download/test_download.py @@ -8,7 +8,7 @@ from asf_search.download.download import download_url def run_test_download_url_auth_error(url, path, filename): - with patch('asf_search.ASFSession.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 401 mock_get.return_value = resp @@ -35,13 +35,13 @@ def run_test_download_url_auth_error(url, path, filename): def run_test_download_url(url, path, filename): if filename == 'BURST': - with patch('asf_search.ASFSession.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 202 resp.headers.update({'content-type': 'application/json'}) mock_get.return_value = resp - with patch('asf_search.ASFSession.ASFSession.get') as mock_get_burst: + with patch('asf_search.ASFSession.get') as mock_get_burst: resp_2 = requests.Response() resp_2.status_code = 200 resp_2.headers.update({'content-type': 'image/tiff'}) @@ -51,7 +51,7 @@ def run_test_download_url(url, path, filename): with patch('builtins.open', unittest.mock.mock_open()) as m: download_url(url, path, filename) else: - with patch('asf_search.ASFSession.ASFSession.get') as mock_get: + with patch('asf_search.ASFSession.get') as mock_get: resp = requests.Response() resp.status_code = 200 mock_get.return_value = resp diff --git a/tests/pytest-managers.py b/tests/pytest-managers.py index b6619f3c..96d2cb5e 100644 --- a/tests/pytest-managers.py +++ b/tests/pytest-managers.py @@ -82,7 +82,7 @@ def test_ASFSession_Error(**args) -> None: test_info = args["test_info"] username = test_info["username"] password = test_info["password"] - with patch('asf_search.ASFSession.ASFSession.get') as mock_get: + 
with patch('asf_search.ASFSession.get') as mock_get: mock_get.return_value = "Error" with raises(ASFAuthenticationError): From bfc67dfd6a9437e7a09c248d245102587844b224 Mon Sep 17 00:00:00 2001 From: kim Date: Fri, 8 Mar 2024 13:20:19 -0900 Subject: [PATCH 19/30] fixes broken jsonlite outputs when results are empty --- asf_search/ASFSearchOptions/validators.py | 2 +- asf_search/export/jsonlite.py | 5 ++++- asf_search/export/jsonlite2.py | 6 +++++- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/asf_search/ASFSearchOptions/validators.py b/asf_search/ASFSearchOptions/validators.py index 9c09d568..fb192682 100644 --- a/asf_search/ASFSearchOptions/validators.py +++ b/asf_search/ASFSearchOptions/validators.py @@ -129,7 +129,7 @@ def parse_cmr_keywords_list(value: Sequence[Dict]): # Parse and validate an iterable of strings: "foo,bar,baz" def parse_string_list(value: Sequence[str]) -> List[str]: - return parse_list(value, str) + return parse_list(value, parse_string) # Parse and validate an iterable of integers: "1,2,3" diff --git a/asf_search/export/jsonlite.py b/asf_search/export/jsonlite.py index 56848b35..99e73de8 100644 --- a/asf_search/export/jsonlite.py +++ b/asf_search/export/jsonlite.py @@ -19,7 +19,10 @@ def results_to_jsonlite(results): ASF_LOGGER.info('started translating results to jsonlite format') - + if len(results) == 0: + yield from json.JSONEncoder(indent=2, sort_keys=True).iterencode({'results': []}) + return + if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): results = [results] diff --git a/asf_search/export/jsonlite2.py b/asf_search/export/jsonlite2.py index 125363df..fac39943 100644 --- a/asf_search/export/jsonlite2.py +++ b/asf_search/export/jsonlite2.py @@ -7,7 +7,11 @@ def results_to_jsonlite2(results): ASF_LOGGER.info('started translating results to jsonlite2 format') - + + if len(results) == 0: + yield from json.JSONEncoder(indent=2, sort_keys=True).iterencode({'results': []}) + return + if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): results = [results] From 9410d8589b1d7e21d679f0cf56660c1bf4c6091a Mon Sep 17 00:00:00 2001 From: kim Date: Tue, 2 Apr 2024 16:22:48 -0800 Subject: [PATCH 20/30] fixes changed method name in S1Product --- asf_search/Products/S1Product.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py index 6092fea3..45ea8fc3 100644 --- a/asf_search/Products/S1Product.py +++ b/asf_search/Products/S1Product.py @@ -33,7 +33,7 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): self.properties['s3Urls'] = self._get_s3_urls() - if self._has_baseline(): + if self.has_baseline(): self.baseline = self.get_baseline_calc_properties() def has_baseline(self) -> bool: From 853807716cf5259414ed6bded5e2d63264d89b11 Mon Sep 17 00:00:00 2001 From: kim Date: Mon, 8 Apr 2024 17:05:28 -0800 Subject: [PATCH 21/30] got asfframe known bugs tests passing, added comment for first test in file --- asf_search/Products/RADARSATProduct.py | 3 +- asf_search/WKT/FilesToWKT.py | 209 +++++++++++++++++++++++++ asf_search/WKT/__init__.py | 1 + asf_search/__init__.py | 2 +- asf_search/export/csv.py | 4 +- setup.py | 7 +- 6 files changed, 221 insertions(+), 5 deletions(-) create mode 100644 asf_search/WKT/FilesToWKT.py diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py index 7db7f1b2..734194d1 100644 --- a/asf_search/Products/RADARSATProduct.py +++ 
b/asf_search/Products/RADARSATProduct.py
@@ -1,6 +1,6 @@
 from typing import Dict, Union
 from asf_search import ASFSearchOptions, ASFSession, ASFProduct, ASFStackableProduct
-from asf_search.CMR.translate import try_parse_float
+from asf_search.CMR.translate import try_parse_float, try_parse_int
 from asf_search.constants import PRODUCT_TYPE
 
 
@@ -13,6 +13,7 @@ class RADARSATProduct(ASFStackableProduct):
         'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
         'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]},
         'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
+        'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, # Sentinel and ALOS product alt for frameNumber (ESA_FRAME)
     }
 
     def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
diff --git a/asf_search/WKT/FilesToWKT.py b/asf_search/WKT/FilesToWKT.py
new file mode 100644
index 00000000..e7254411
--- /dev/null
+++ b/asf_search/WKT/FilesToWKT.py
@@ -0,0 +1,209 @@
+import os
+import json
+import zipfile
+import shapefile
+import defusedxml.minidom as md
+from kml2geojson import build_feature_collection as kml2json
+from geomet import wkt
+from io import BytesIO
+import geopandas
+
+# taken from ASF's Discovery-WKTUtils
+# Repo: https://github.com/asfadmin/Discovery-WKTUtils
+# File: https://github.com/asfadmin/Discovery-WKTUtils/blob/devel/WKTUtils/FilesToWKT.py
+class filesToWKT:
+    # files = [ open(dir, 'rb'), open(dir2, 'rb'), open(dir3, 'rb') ]
+    def __init__(self, files):
+        self.files = files
+        self.errors = []
+        self.returned_dict = {}
+        # If they pass only one, make that a list of one:
+        if not isinstance(files, type([])):
+            self.files = [self.files]
+        # Have to group all shp types together:
+        file_dict = {}
+        for file in self.files:
+            try:
+                full_name = file.filename
+            except AttributeError:
+                full_name = file.name
+            name = ".".join(full_name.split(".")[:-1]) # Everything before the last dot.
+            ext = full_name.split(".")[-1:][0].lower() # Everything after the last dot.
+            ### First see if geopandas can handle it.
+            try:
+                geoshape: geopandas.GeoDataFrame = geopandas.read_file(file)
+                # Turn from GeoDataFrame to GeoSeries:
+                geoshape = geoshape.geometry
+                # Add it to the file list:
+                self.add_file_to_dict(file_dict, name+".pandas", geoshape)
+                continue
+            # If anything goes wrong, try to go back to the old ways:
+            except:
+                file.seek(0) # Move read cursor back to 0, lets you read again
+            if ext == "zip":
+                # First check for a full shapefile set:
+                with BytesIO(file.read()) as zip_f:
+                    zip_obj = zipfile.ZipFile(zip_f)
+                    parts = zip_obj.namelist()
+                    for part_path in parts:
+                        # If it's a dir, skip it. ('parts' still contains the files in that dir)
+                        if part_path.endswith("/"):
+                            continue
+                        self.add_file_to_dict(file_dict, part_path, zip_obj.read(part_path))
+            else:
+                # Try to add whatever it is:
+                self.add_file_to_dict(file_dict, full_name, file.read())
+
+        # With everything organized in the dict, start parsing them:
+        wkt_list = []
+        for key, val in file_dict.items():
+            ext = key.split(".")[-1:][0].lower()
+            # If it's a shp set. (Check first, because 'file.kml.shp' will be loaded, and
+            # the key will become 'file.kml'. The val is always a dict for shps tho):
+            if isinstance(val, type({})):
+                returned_wkt = parse_shapefile(val)
+            elif ext == "pandas":
+                # For this, val IS the geopandas object.
+                # Check if you need to reproject the wkt. 
(Might be None):
+                if val.crs and val.crs != "EPSG:4326":
+                    val = val.to_crs("EPSG:4326")
+                if len(val) == 0:
+                    continue
+                elif len(val) == 1:
+                    returned_wkt = json_to_wkt(val[0].__geo_interface__)
+                else:
+                    tmp_list = [json_to_wkt(shape.__geo_interface__) for shape in val]
+                    returned_wkt = "GEOMETRYCOLLECTION ({0})".format(",".join(tmp_list))
+            # Check for each type now:
+            elif ext == "geojson":
+                returned_wkt = parse_geojson(val)
+            elif ext == "kml":
+                returned_wkt = parse_kml(val)
+            else:
+                # This *should* never get hit, but someone might add a new file-type in 'add_file_to_dict' without declaring it here.
+                self.errors.append({"type": "STREAM_UNKNOWN", "report": "Ignoring file with unknown tag. File: '{0}'".format(os.path.basename(key))})
+                continue
+            # If the parse function returned a json error:
+            if isinstance(returned_wkt, type({})) and "error" in returned_wkt:
+                # Give the error a better description:
+                returned_wkt["error"]["report"] += " (Cannot load file: '{0}')".format(os.path.basename(key))
+                self.errors.append(returned_wkt["error"])
+                continue
+            else:
+                wkt_list.append(returned_wkt)
+
+        # Turn it into a single WKT:
+        full_wkt = "GEOMETRYCOLLECTION({0})".format(",".join(wkt_list))
+
+        # Bring it to json and back, to collapse any nested GEOMETRYCOLLECTIONS.
+        # It'll be in a collection if and only if there is more than one shape.
+        full_wkt = json_to_wkt(wkt.loads(full_wkt))
+        self.returned_dict = {"parsed wkt": full_wkt}
+
+
+    def getWKT(self):
+        # Only return the 'errors' key IF there are errors...
+        if self.errors != []:
+            self.returned_dict['errors'] = self.errors
+        return self.returned_dict
+
+    # Helper for organizing files into a dict, combining shps/shx, etc.
+    def add_file_to_dict(self, file_dict, full_name, file_stream):
+        ext = full_name.split(".")[-1:][0].lower() # Everything after the last dot.
+        file_name = ".".join(full_name.split(".")[:-1]) # Everything before the last dot.
+
+        # SHP'S:
+        if ext in ["shp", "shx", "dbf"]:
+            # Save shps as {"filename": {"shp": data, "shx": data, "dbf": data}, "file_2.kml": kml_data}
+            if file_name not in file_dict:
+                file_dict[file_name] = {}
+            file_dict[file_name][ext] = BytesIO(file_stream)
+        elif ext in ["pandas"]:
+            file_dict[full_name] = file_stream # Actually the geopandas object for this one.
+        # BASIC FILES:
+        elif ext in ["kml", "geojson"]:
+            file_dict[full_name] = BytesIO(file_stream)
+        # Else they pass a zip again:
+        elif ext in ["zip"]:
+            self.errors.append({"type": "FILE_UNZIP", "report": "Cannot unzip double-compressed files. File: '{0}'.".format(os.path.basename(full_name))})
+        else:
+            self.errors.append({"type": "FILE_UNKNOWN", "report": "Ignoring file with unknown extension. File: '{0}'.".format(os.path.basename(full_name))})
+
+
+
+# Takes any json, and returns a list of all {"type": x, "coordinates": y} objects
+# found, ignoring anything else in the block
+def recurse_find_geojson(json_input):
+    # NOTE: geojson doesn't go through this anymore, with adding the geopandas
+    # parser. Instead, make this happen AFTER shapes are loaded/transformed
+    # to geojson, to simplify EVERYTHING handed to us down.
+    if isinstance(json_input, type({})):
+        # If it's a dict, try to load the minimal required for a shape. 
+        # Then recurse on every object, just in case more are nested inside:
+        try:
+            new_shape = { "type": json_input["type"], "coordinates": json_input["coordinates"] }
+            yield new_shape
+        except KeyError:
+            pass
+        for key_value_pair in json_input.items():
+            yield from recurse_find_geojson(key_value_pair[1])
+    # If it's a list, just loop through it:
+    elif isinstance(json_input, type([])):
+        for item in json_input:
+            yield from recurse_find_geojson(item)
+
+# Takes a json, and returns a possibly-simplified wkt_str
+# Used by both parse_geojson, and parse_kml
+def json_to_wkt(geojson):
+    geojson_list = []
+    for new_shape in recurse_find_geojson(geojson):
+        geojson_list.append(new_shape)
+
+    if len(geojson_list) == 0:
+        return {'error': {'type': 'VALUE', 'report': 'Could not find any shapes inside geojson.'}}
+    elif len(geojson_list) == 1:
+        wkt_json = geojson_list[0]
+    else:
+        wkt_json = { 'type': 'GeometryCollection', 'geometries': geojson_list }
+
+    try:
+        wkt_str = wkt.dumps(wkt_json)
+    except (KeyError, ValueError) as e:
+        return {'error': {'type': 'VALUE', 'report': 'Problem converting a shape to string: {0}'.format(str(e))}}
+    return wkt_str
+
+
+def parse_geojson(f):
+    try:
+        data = f.read()
+        geojson = json.loads(data)
+    except json.JSONDecodeError as e:
+        return {'error': {'type': 'DECODE', 'report': 'Could not parse GeoJSON: {0}'.format(str(e))}}
+    except KeyError as e:
+        return {'error': {'type': 'KEY', 'report': 'Missing expected key: {0}'.format(str(e))}}
+    except ValueError as e:
+        return {'error': {'type': 'VALUE', 'report': 'Could not parse GeoJSON: {0}'.format(str(e))}}
+    return json_to_wkt(geojson)
+
+
+def parse_kml(f):
+    try:
+        kml_str = f.read()
+        kml_root = md.parseString(kml_str, forbid_dtd=True)
+        wkt_json = kml2json(kml_root)
+    # All of these EXCEPT the type/value errors come from md.parseString:
+    # except (DefusedXmlException, DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden, NotSupportedError, TypeError, ValueError) as e:
+    except Exception as e:
+        return {'error': {'type': 'VALUE', 'report': 'Could not parse kml: {0}'.format(str(e))}}
+    return json_to_wkt(wkt_json)
+
+def parse_shapefile(fileset):
+    try:
+        reader = shapefile.Reader(**fileset)
+        shapes = [i.__geo_interface__ for i in reader.shapes()]
+    # In the source code, it looks like the reader sometimes throws a bare "Exception":
+    except Exception as e:
+        return {'error': {'type': 'VALUE', 'report': 'Could not parse shp: {0}'.format(str(e))}}
+    wkt_json = {'type':'GeometryCollection', 'geometries': shapes }
+    wkt_str = json_to_wkt(wkt_json)
+    return wkt_str
diff --git a/asf_search/WKT/__init__.py b/asf_search/WKT/__init__.py
index b3cb6ee8..81c8854e 100644
--- a/asf_search/WKT/__init__.py
+++ b/asf_search/WKT/__init__.py
@@ -1,2 +1,3 @@
 from .validate_wkt import validate_wkt
 from .RepairEntry import RepairEntry
+from .FilesToWKT import filesToWKT
diff --git a/asf_search/__init__.py b/asf_search/__init__.py
index 91e88f22..68cf208a 100644
--- a/asf_search/__init__.py
+++ b/asf_search/__init__.py
@@ -34,7 +34,7 @@
 from .download import *
 from .CMR import *
 from .baseline import *
-from .WKT import validate_wkt
+from .WKT import validate_wkt, filesToWKT
 from .export import *
 
 REPORT_ERRORS=True
diff --git a/asf_search/export/csv.py b/asf_search/export/csv.py
index 575e7320..2be9b71b 100644
--- a/asf_search/export/csv.py
+++ b/asf_search/export/csv.py
@@ -20,7 +20,7 @@
     ('doppler', ['AdditionalAttributes', ('Name', 'DOPPLER'), 'Values', 0]),
     ('sizeMB', ['DataGranule', 'ArchiveAndDistributionInformation', 0, 'Size']),
('insarStackSize', ['AdditionalAttributes', ('Name', 'INSAR_STACK_SIZE'), 'Values', 0]), - ('offNadirAngle', ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0]) + ('offNadirAngle', ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0]), ] fieldnames = ( @@ -122,7 +122,7 @@ def getItem(self, p): "Sensor":p.get('sensor'), "Beam Mode":p.get('beamModeType'), "Beam Mode Description":p.get('configurationName'), - "Orbit":p.get('orbit'), + "Orbit":p.get('orbit') if not isinstance(p.get('orbit'), list) else p.get('orbit')[0], "Path Number":p.get('pathNumber'), "Frame Number":p.get('frameNumber'), "Acquisition Date":p.get('sceneDate'), diff --git a/setup.py b/setup.py index d88008cf..fe8c1ede 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,12 @@ ] extra_requirements = [ - "remotezip>=0.10.0" + "remotezip>=0.10.0", # required for remote zip functionality + "geopandas", # required for FilesToWKT functionality + "geomet", + "kml2geojson", + "shapefile", + "zipfile" ] From 460e261d5f7c30cd96d66075bf9ca09fc5957f6a Mon Sep 17 00:00:00 2001 From: kim Date: Tue, 9 Apr 2024 11:35:57 -0800 Subject: [PATCH 22/30] Exposes esa_frame in RADARSAT Product, orbit can be list in csv (for now) --- asf_search/ASFSearchOptions/ASFSearchOptions.py | 2 +- asf_search/Products/RADARSATProduct.py | 1 + asf_search/export/csv.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/asf_search/ASFSearchOptions/ASFSearchOptions.py b/asf_search/ASFSearchOptions/ASFSearchOptions.py index 8b1103e7..d8c83aff 100644 --- a/asf_search/ASFSearchOptions/ASFSearchOptions.py +++ b/asf_search/ASFSearchOptions/ASFSearchOptions.py @@ -69,7 +69,7 @@ def __str__(self): """ What to display if `print(opts)` is called. """ - return json.dumps(dict(self), indent=4) + return json.dumps(dict(self), default=str, indent=4) # Default is set to '...', since 'None' is a very valid value here def pop(self, key, default=...): diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py index 734194d1..a2958e1c 100644 --- a/asf_search/Products/RADARSATProduct.py +++ b/asf_search/Products/RADARSATProduct.py @@ -14,6 +14,7 @@ class RADARSATProduct(ASFStackableProduct): 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME) + 'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): diff --git a/asf_search/export/csv.py b/asf_search/export/csv.py index 2be9b71b..47af555e 100644 --- a/asf_search/export/csv.py +++ b/asf_search/export/csv.py @@ -122,7 +122,7 @@ def getItem(self, p): "Sensor":p.get('sensor'), "Beam Mode":p.get('beamModeType'), "Beam Mode Description":p.get('configurationName'), - "Orbit":p.get('orbit') if not isinstance(p.get('orbit'), list) else p.get('orbit')[0], + "Orbit":p.get('orbit'), "Path Number":p.get('pathNumber'), "Frame Number":p.get('frameNumber'), "Acquisition Date":p.get('sceneDate'), From 3b81b7529b36b2fb96daabdf33d025c3699aaffb Mon Sep 17 00:00:00 2001 From: kim Date: Wed, 17 Apr 2024 08:12:09 -0800 Subject: [PATCH 23/30] adds some log messages for measuring performance --- asf_search/ASFProduct.py | 33 +++++++----------------- 
asf_search/ASFStackableProduct.py | 9 ------- asf_search/Products/AIRSARProduct.py | 10 ++----- asf_search/Products/ALOSProduct.py | 10 ++----- asf_search/Products/ARIAS1GUNWProduct.py | 10 ++----- asf_search/Products/ERSProduct.py | 10 ++----- asf_search/Products/JERSProduct.py | 10 ++----- asf_search/Products/NISARProduct.py | 10 ++----- asf_search/Products/OPERAS1Product.py | 10 ++----- asf_search/Products/RADARSATProduct.py | 10 ++----- asf_search/Products/S1BurstProduct.py | 12 +++------ asf_search/Products/S1Product.py | 10 ++----- asf_search/Products/SEASATProduct.py | 10 ++----- asf_search/Products/SIRCProduct.py | 10 ++----- asf_search/Products/SMAPProduct.py | 10 ++----- asf_search/Products/UAVSARProduct.py | 10 ++----- asf_search/search/baseline_search.py | 11 ++------ asf_search/search/search.py | 6 ++++- asf_search/search/search_generator.py | 23 ++++++++++++++--- 19 files changed, 65 insertions(+), 159 deletions(-) diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py index a6dcf18d..62b4bb91 100644 --- a/asf_search/ASFProduct.py +++ b/asf_search/ASFProduct.py @@ -41,7 +41,7 @@ class ASFProduct: def get_classname(cls): return cls.__name__ - _base_properties = { + _properties_paths = { # min viable product 'centerLat': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0], 'cast': try_parse_float}, 'centerLon': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0], 'cast': try_parse_float}, @@ -67,16 +67,14 @@ def get_classname(cls): 'sensor': {'path': [ 'Platforms', 0, 'Instruments', 0, 'ShortName'], }, } """ - _base_properties dictionary, mapping readable property names to paths and optional type casting + _properties_paths dictionary, mapping readable property names to paths and optional type casting entries are organized as such: - `PROPERTY_NAME`: The name the property should be called in `ASFProduct.properties` - `path`: the expected path in the CMR UMM json granule response as a list - `cast`: (optional): the optional type casting method - Defining `_base_properties` in subclasses allows for defining custom properties or overiding existing ones. - See `S1Product.get_property_paths()` on how subclasses are expected to - combine `ASFProduct._base_properties` with their own separately defined `_base_properties` + Defining `_properties_paths` in subclasses allows for defining custom properties or overiding existing ones. """ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): @@ -263,9 +261,11 @@ def translate_product(self, item: Dict) -> Dict: umm = item.get('umm') + additionalAttributes = {attr['Name']: attr['Values'] for attr in umm['AdditionalAttributes']} + properties = { - prop: self._read_umm_property(umm, umm_mapping) - for prop, umm_mapping in self.get_property_paths().items() + prop: additionalAttributes.get(umm_mapping['path'][1][1])[0] if umm_mapping[0] == 'AdditionalAttributes' else self._read_umm_property(umm, umm_mapping) + for prop, umm_mapping in self._properties_paths } if properties.get('url') is not None: @@ -282,19 +282,6 @@ def translate_product(self, item: Dict) -> Dict: return {'geometry': geometry, 'properties': properties, 'type': 'Feature'} - # ASFProduct subclasses define extra/override param key + UMM pathing here - @staticmethod - def get_property_paths() -> Dict: - """ - Returns _base_properties of class, subclasses such as `S1Product` (or user provided subclasses) can override this to - define which properties they want in their subclass's properties dict. 
- - (See `S1Product.get_property_paths()` for example of combining _base_properties of multiple classes) - - :returns dictionary, {`PROPERTY_NAME`: {'path': [umm, path, to, value], 'cast (optional)': Callable_to_cast_value}, ...} - """ - return ASFProduct._base_properties - def get_sort_keys(self) -> Tuple: """ Returns tuple of primary and secondary date values used for sorting final search results @@ -376,7 +363,9 @@ def umm_get(item: Dict, *args): if item is None: return None for key in args: - if isinstance(key, int): + if isinstance(key, str): + item = item.get(key) + elif isinstance(key, int): item = item[key] if key < len(item) else None elif isinstance(key, tuple): (a, b) = key @@ -399,8 +388,6 @@ def umm_get(item: Dict, *args): break if not found: return None - else: - item = item.get(key) if item is None: return None if item in [None, 'NA', 'N/A', '']: diff --git a/asf_search/ASFStackableProduct.py b/asf_search/ASFStackableProduct.py index b85f0626..e41d274e 100644 --- a/asf_search/ASFStackableProduct.py +++ b/asf_search/ASFStackableProduct.py @@ -13,8 +13,6 @@ class ASFStackableProduct(ASFProduct): ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/ ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/ """ - _base_properties = { - } class BaselineCalcType(Enum): """ @@ -53,13 +51,6 @@ def get_stack_opts(self, opts: ASFSearchOptions = None): stack_opts.insarStackId = self.properties['insarStackId'] return stack_opts - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **ASFStackableProduct._base_properties - } - def is_valid_reference(self): # we don't stack at all if any of stack is missing insarBaseline, unlike stacking S1 products(?) 
if 'insarBaseline' not in self.baseline: diff --git a/asf_search/Products/AIRSARProduct.py b/asf_search/Products/AIRSARProduct.py index 54c2c03c..aa1e0e8b 100644 --- a/asf_search/Products/AIRSARProduct.py +++ b/asf_search/Products/AIRSARProduct.py @@ -7,7 +7,8 @@ class AIRSARProduct(ASFProduct): """ ASF Dataset Overview Page: https://asf.alaska.edu/data-sets/sar-data-sets/airsar/ """ - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, @@ -16,10 +17,3 @@ class AIRSARProduct(ASFProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **AIRSARProduct._base_properties - } diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py index d90902f7..035b3d1f 100644 --- a/asf_search/Products/ALOSProduct.py +++ b/asf_search/Products/ALOSProduct.py @@ -10,7 +10,8 @@ class ALOSProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, 'offNadirAngle': {'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0], 'cast': try_parse_float}, @@ -31,10 +32,3 @@ def get_default_baseline_product_type() -> Union[str, None]: Returns the product type to search for when building a baseline stack. """ return PRODUCT_TYPE.L1_1 - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **ALOSProduct._base_properties - } diff --git a/asf_search/Products/ARIAS1GUNWProduct.py b/asf_search/Products/ARIAS1GUNWProduct.py index 2d88419a..030a2399 100644 --- a/asf_search/Products/ARIAS1GUNWProduct.py +++ b/asf_search/Products/ARIAS1GUNWProduct.py @@ -11,7 +11,8 @@ class ARIAS1GUNWProduct(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/derived-data-sets/sentinel-1-interferograms/ """ - _base_properties = { + _properties_paths = { + **S1Product._properties_paths, 'perpendicularBaseline': {'path': ['AdditionalAttributes', ('Name', 'PERPENDICULAR_BASELINE'), 'Values', 0], 'cast': try_parse_float}, 'orbit': {'path': ['OrbitCalculatedSpatialDomains']} } @@ -26,13 +27,6 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): self.properties['fileName'] = self.properties['fileID'] + '.' 
+ urls[0].split('.')[-1] self.properties['additionalUrls'] = [urls[1]] - @staticmethod - def get_property_paths() -> Dict: - return { - **S1Product.get_property_paths(), - **ARIAS1GUNWProduct._base_properties - } - def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: """ Build search options that can be used to find an insar stack for this product diff --git a/asf_search/Products/ERSProduct.py b/asf_search/Products/ERSProduct.py index a2dbff98..4a53d35c 100644 --- a/asf_search/Products/ERSProduct.py +++ b/asf_search/Products/ERSProduct.py @@ -11,7 +11,8 @@ class ERSProduct(ASFStackableProduct): ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/ ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]}, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, 'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0]}, @@ -23,13 +24,6 @@ class ERSProduct(ASFStackableProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **ERSProduct._base_properties - } - @staticmethod def get_default_baseline_product_type() -> Union[str, None]: """ diff --git a/asf_search/Products/JERSProduct.py b/asf_search/Products/JERSProduct.py index 1963225f..f829a760 100644 --- a/asf_search/Products/JERSProduct.py +++ b/asf_search/Products/JERSProduct.py @@ -7,7 +7,8 @@ class JERSProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/jers-1/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]}, 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, @@ -24,10 +25,3 @@ def get_default_baseline_product_type() -> Union[str, None]: Returns the product type to search for when building a baseline stack. 
""" return PRODUCT_TYPE.L0 - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **JERSProduct._base_properties - } diff --git a/asf_search/Products/NISARProduct.py b/asf_search/Products/NISARProduct.py index 279e014a..5ca3e239 100644 --- a/asf_search/Products/NISARProduct.py +++ b/asf_search/Products/NISARProduct.py @@ -10,7 +10,8 @@ class NISARProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']} } @@ -40,13 +41,6 @@ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: :return: ASFSearchOptions describing appropriate options for building a stack from this product """ return None - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **NISARProduct._base_properties - } def get_sort_keys(self): keys = super().get_sort_keys() diff --git a/asf_search/Products/OPERAS1Product.py b/asf_search/Products/OPERAS1Product.py index d205b840..95efc891 100644 --- a/asf_search/Products/OPERAS1Product.py +++ b/asf_search/Products/OPERAS1Product.py @@ -8,7 +8,8 @@ class OPERAS1Product(S1Product): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/opera/ """ - _base_properties = { + _properties_paths = { + **S1Product._properties_paths, 'centerLat': {'path': []}, # Opera products lacks these fields 'centerLon': {'path': []}, 'frameNumber': {'path': []}, @@ -46,13 +47,6 @@ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): def get_stack_opts(self, opts: ASFSearchOptions = ASFSearchOptions()) -> ASFSearchOptions: return opts - @staticmethod - def get_property_paths() -> Dict: - return { - **S1Product.get_property_paths(), - **OPERAS1Product._base_properties - } - @staticmethod def get_default_baseline_product_type() -> None: """ diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py index a2958e1c..6d2a842a 100644 --- a/asf_search/Products/RADARSATProduct.py +++ b/asf_search/Products/RADARSATProduct.py @@ -8,7 +8,8 @@ class RADARSATProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/radarsat-1/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, @@ -20,13 +21,6 @@ class RADARSATProduct(ASFStackableProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **RADARSATProduct._base_properties - } - @staticmethod def get_default_baseline_product_type() -> Union[str, None]: """ diff --git a/asf_search/Products/S1BurstProduct.py b/asf_search/Products/S1BurstProduct.py index f4f7a249..7d35f923 100644 --- a/asf_search/Products/S1BurstProduct.py +++ b/asf_search/Products/S1BurstProduct.py @@ -17,7 +17,8 @@ class S1BurstProduct(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/ """ - 
_base_properties = { + _properties_paths = { + **S1Product._properties_paths, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]}, 'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int}, 'relativeBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_RELATIVE'), 'Values', 0], 'cast': try_parse_int}, @@ -65,14 +66,7 @@ def get_stack_opts(self, opts: ASFSearchOptions = None): stack_opts.fullBurstID = self.properties['burst']['fullBurstID'] stack_opts.polarization = [self.properties['polarization']] return stack_opts - - @staticmethod - def get_property_paths() -> Dict: - return { - **S1Product.get_property_paths(), - **S1BurstProduct._base_properties - } - + def _get_additional_filenames_and_urls(self, default_filename: str = None): # Burst XML filenames are just numbers, this makes it more indentifiable if default_filename is None: diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py index 45ea8fc3..c6f24ff5 100644 --- a/asf_search/Products/S1Product.py +++ b/asf_search/Products/S1Product.py @@ -15,7 +15,8 @@ class S1Product(ASFStackableProduct): ASF Dataset Overview Page: https://asf.alaska.edu/datasets/daac/sentinel-1/ """ - _base_properties = { + _properties_paths = { + **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME) 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, @@ -120,13 +121,6 @@ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: return stack_opts - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFStackableProduct.get_property_paths(), - **S1Product._base_properties - } - def is_valid_reference(self) -> bool: keys = ['postPosition', 'postPositionTime', 'prePosition', 'postPositionTime'] diff --git a/asf_search/Products/SEASATProduct.py b/asf_search/Products/SEASATProduct.py index e726d756..1158fcf1 100644 --- a/asf_search/Products/SEASATProduct.py +++ b/asf_search/Products/SEASATProduct.py @@ -7,7 +7,8 @@ class SEASATProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/seasat/ """ - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'bytes': {'path': [ 'AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, @@ -15,10 +16,3 @@ class SEASATProduct(ASFProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **SEASATProduct._base_properties - } diff --git a/asf_search/Products/SIRCProduct.py b/asf_search/Products/SIRCProduct.py index e5e9ad31..e2b05df3 100644 --- a/asf_search/Products/SIRCProduct.py +++ b/asf_search/Products/SIRCProduct.py @@ -5,7 +5,8 @@ class SIRCProduct(ASFProduct): """ Dataset Documentation Page: https://eospso.nasa.gov/missions/spaceborne-imaging-radar-c """ - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', 
('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion'] }, @@ -14,10 +15,3 @@ class SIRCProduct(ASFProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **SIRCProduct._base_properties - } diff --git a/asf_search/Products/SMAPProduct.py b/asf_search/Products/SMAPProduct.py index f78f00e0..a2750032 100644 --- a/asf_search/Products/SMAPProduct.py +++ b/asf_search/Products/SMAPProduct.py @@ -7,7 +7,8 @@ class SMAPProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/ """ - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, @@ -15,10 +16,3 @@ class SMAPProduct(ASFProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **SMAPProduct._base_properties - } diff --git a/asf_search/Products/UAVSARProduct.py b/asf_search/Products/UAVSARProduct.py index 73acd812..f33a39dc 100644 --- a/asf_search/Products/UAVSARProduct.py +++ b/asf_search/Products/UAVSARProduct.py @@ -7,7 +7,8 @@ class UAVSARProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/uavsar/ """ - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, @@ -15,10 +16,3 @@ class UAVSARProduct(ASFProduct): def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) - - @staticmethod - def get_property_paths() -> Dict: - return { - **ASFProduct.get_property_paths(), - **UAVSARProduct._base_properties - } diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py index 4d7da17f..77b01f7c 100644 --- a/asf_search/search/baseline_search.py +++ b/asf_search/search/baseline_search.py @@ -100,18 +100,11 @@ def _cast_to_subclass(product: ASFProduct, subclass: Type[ASFProduct]) -> ASFPro example: ``` class MyCustomClass(ASFProduct): - _base_properties = { + _properties_paths = { + **ASFProduct._properties_paths, 'some_unique_property': {'path': ['AdditionalAttributes', 'UNIQUE_PROPERTY', ...]} } - ... 
- - @staticmethod - def get_property_paths() -> dict: - return { - **ASFProduct.get_property_paths(), - **MyCustomClass._base_properties - } # subclass as constructor customReference = reference.cast_to_subclass(MyCustomClass) diff --git a/asf_search/search/search.py b/asf_search/search/search.py index 88818012..cbee1da7 100644 --- a/asf_search/search/search.py +++ b/asf_search/search/search.py @@ -1,8 +1,9 @@ +import time from typing import Union, Sequence, Tuple from copy import copy import datetime -from asf_search import ASFSearchResults +from asf_search import ASF_LOGGER, ASFSearchResults from asf_search.ASFSearchOptions import ASFSearchOptions from asf_search.search.search_generator import search_generator @@ -97,10 +98,13 @@ def search( results = ASFSearchResults([]) # The last page will be marked as complete if results sucessful + perf = time.time() for page in search_generator(opts=opts): + ASF_LOGGER.warning(f"Page Time Elapsed {time.time() - perf}") results.extend(page) results.searchComplete = page.searchComplete results.searchOptions = page.searchOptions + perf = time.time() results.raise_if_incomplete() results.sort(key=lambda p: p.get_sort_keys(), reverse=True) diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index bc7114b3..e6d70676 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -1,4 +1,5 @@ import logging +import time from typing import Dict, Generator, Union, Sequence, Tuple, List from copy import copy from requests.exceptions import HTTPError @@ -92,7 +93,6 @@ def search_generator( queries = build_subqueries(opts) for query in queries: translated_opts = translate_opts(query) - ASF_LOGGER.warning(f"TRANSLATED PARAMS: {translated_opts}") cmr_search_after_header = "" subquery_count = 0 @@ -113,7 +113,9 @@ def search_generator( raise opts.session.headers.update({'CMR-Search-After': cmr_search_after_header}) + perf = time.time() last_page = process_page(items, maxResults, subquery_max_results, total, subquery_count, opts) + ASF_LOGGER.warning(f"Page Processing Time {time.time() - perf}") subquery_count += len(last_page) total += len(last_page) last_page.searchComplete = subquery_count == subquery_max_results or total == maxResults @@ -137,9 +139,12 @@ def search_generator( def query_cmr(session: ASFSession, url: str, translated_opts: Dict, sub_query_count: int): response = get_page(session=session, url=url, translated_opts=translated_opts) + perf = time.time() items = [as_ASFProduct(f, session=session) for f in response.json()['items']] + ASF_LOGGER.warning(f"Product Subclassing Time {time.time() - perf}") hits: int = response.json()['hits'] # total count of products given search opts - + # 9-10 per process + # 3.9-5 per process # sometimes CMR returns results with the wrong page size if len(items) != INTERNAL.CMR_PAGE_SIZE and len(items) + sub_query_count < hits: raise CMRIncompleteError(f"CMR returned page of incomplete results. 
Expected {min(INTERNAL.CMR_PAGE_SIZE, hits - sub_query_count)} results, got {len(items)}") @@ -161,6 +166,8 @@ def process_page(items: List[ASFProduct], max_results: int, subquery_max_results stop=stop_after_attempt(3), ) def get_page(session: ASFSession, url: str, translated_opts: List) -> Response: + + perf = time.time() try: response = session.post(url=url, data=translated_opts, timeout=INTERNAL.CMR_TIMEOUT) response.raise_for_status() @@ -173,6 +180,7 @@ def get_page(session: ASFSession, url: str, translated_opts: List) -> Response: except ReadTimeout as exc: raise ASFSearchError(f'Connection Error (Timeout): CMR took too long to respond. Set asf constant "CMR_TIMEOUT" to increase. ({url=}, timeout={INTERNAL.CMR_TIMEOUT})') from exc + ASF_LOGGER.warning(f"Query Time Elapsed {time.time() - perf}") return response @@ -246,6 +254,8 @@ def set_platform_alias(opts: ASFSearchOptions): opts.platform = list(set(platform_list)) +_dataset_collection_items = dataset_collections.items() + def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct: """ Returns the granule umm as the corresponding ASFProduct subclass, or ASFProduct if no equivalent is found @@ -258,18 +268,23 @@ def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct: product_type_key = _get_product_type_key(item) # if there's a direct entry in our dataset to product type dict + # perf = time.time() subclass = dataset_to_product_types.get(product_type_key) if subclass is not None: + # ASF_LOGGER.warning(f'subclass selection time {time.time() - perf}') return subclass(item, session=session) # or if the key matches one of the shortnames in any of our datasets - for dataset, collections in dataset_collections.items(): + + for dataset, collections in _dataset_collection_items: if collections.get(product_type_key) is not None: subclass = dataset_to_product_types.get(dataset) if subclass is not None: + # ASF_LOGGER.warning(f'subclass selection time {time.time() - perf}') return subclass(item, session=session) break # dataset exists, but is not in dataset_to_product_types yet - + + # ASF_LOGGER.warning(f'subclass selection time {time.time() - perf}') return ASFProduct(item, session=session) def _get_product_type_key(item: Dict) -> str: From 967ffc17aff61831716a47f63780fc97c1f25bf4 Mon Sep 17 00:00:00 2001 From: kim Date: Mon, 6 May 2024 08:34:03 -0800 Subject: [PATCH 24/30] reverts additional attribute optimization (for now) --- asf_search/ASFProduct.py | 6 +++--- asf_search/search/search_generator.py | 9 ++++----- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py index 7cd43f97..14c2e357 100644 --- a/asf_search/ASFProduct.py +++ b/asf_search/ASFProduct.py @@ -261,11 +261,11 @@ def translate_product(self, item: Dict) -> Dict: umm = item.get('umm') - additionalAttributes = {attr['Name']: attr['Values'] for attr in umm['AdditionalAttributes']} + # additionalAttributes = {attr['Name']: attr['Values'] for attr in umm['AdditionalAttributes']} properties = { - prop: additionalAttributes.get(umm_mapping['path'][1][1])[0] if umm_mapping[0] == 'AdditionalAttributes' else self._read_umm_property(umm, umm_mapping) - for prop, umm_mapping in self._properties_paths + prop: self._read_umm_property(umm, umm_mapping) + for prop, umm_mapping in self._properties_paths.items() } if properties.get('url') is not None: diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index 798df01c..f002d2f1 100644 --- a/asf_search/search/search_generator.py 
+++ b/asf_search/search/search_generator.py @@ -112,8 +112,8 @@ def search_generator( try: ASF_LOGGER.debug(f'SUBQUERY {subquery_idx + 1}: Fetching page {page_number}') items, subquery_max_results, cmr_search_after_header = query_cmr(opts.session, url, translated_opts, subquery_count) - except (ASFSearchError, CMRIncompleteError) as e: - message = str(e) + except (ASFSearchError, CMRIncompleteError) as exc: + message = str(exc) ASF_LOGGER.error(message) report_search_error(query, message) opts.session.headers.pop('CMR-Search-After', None) @@ -274,7 +274,6 @@ def set_platform_alias(opts: ASFSearchOptions): platform_list.append(plat) opts.platform = list(set(platform_list)) - _dataset_collection_items = dataset_collections.items() def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct: @@ -290,13 +289,13 @@ def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct: # if there's a direct entry in our dataset to product type dict # perf = time.time() - subclass = _dataset_collection_items.get(product_type_key) + subclass = dataset_collections.get(product_type_key) if subclass is not None: # ASF_LOGGER.warning(f'subclass selection time {time.time() - perf}') return subclass(item, session=session) # if the key matches one of the shortnames in any of our datasets - for dataset, collections in dataset_collections.items(): + for dataset, collections in _dataset_collection_items: if collections.get(product_type_key) is not None: subclass = dataset_to_product_types.get(dataset) if subclass is not None: From 151b1f21787c1d81b2178fbae08a5c8dbd9893fc Mon Sep 17 00:00:00 2001 From: kim Date: Mon, 6 May 2024 16:21:49 -0800 Subject: [PATCH 25/30] use dict.get() in kml --- asf_search/export/kml.py | 12 ++++++------ asf_search/search/search_generator.py | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/asf_search/export/kml.py b/asf_search/export/kml.py index 1486a1f8..c2dadcad 100644 --- a/asf_search/export/kml.py +++ b/asf_search/export/kml.py @@ -139,12 +139,12 @@ def getItem(self, p): # Helper method for getting additional fields in
    tag def metadata_fields(self, item: Dict): required = { - 'Processing type: ': item['processingTypeDisplay'], - 'Frame: ': item['frameNumber'], - 'Path: ': item['pathNumber'], - 'Orbit: ': item['orbit'], - 'Start time: ': item['startTime'], - 'End time: ': item['stopTime'], + 'Processing type: ': item.get('processingTypeDisplay'), + 'Frame: ': item.get('frameNumber'), + 'Path: ': item.get('pathNumber'), + 'Orbit: ': item.get('orbit'), + 'Start time: ': item.get('startTime'), + 'End time: ': item.get('stopTime'), } optional = {} diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index f002d2f1..b2dffe3e 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -289,7 +289,7 @@ def as_ASFProduct(item: Dict, session: ASFSession) -> ASFProduct: # if there's a direct entry in our dataset to product type dict # perf = time.time() - subclass = dataset_collections.get(product_type_key) + subclass = dataset_to_product_types.get(product_type_key) if subclass is not None: # ASF_LOGGER.warning(f'subclass selection time {time.time() - perf}') return subclass(item, session=session) From abcbc0d39530707ca336c66f1f14c7ef2b28f943 Mon Sep 17 00:00:00 2001 From: kim Date: Fri, 2 Aug 2024 10:31:52 -0800 Subject: [PATCH 26/30] Fixes customized CMR_TIMEOUT, sets timeout to 60 on intersection test --- asf_search/search/search_generator.py | 6 +++--- tests/ASFSearchResults/test_ASFSearchResults.py | 15 ++++++++++----- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/asf_search/search/search_generator.py b/asf_search/search/search_generator.py index c7568118..e4bc4b12 100644 --- a/asf_search/search/search_generator.py +++ b/asf_search/search/search_generator.py @@ -185,10 +185,10 @@ def process_page(items: List[ASFProduct], max_results: int, subquery_max_results stop=stop_after_attempt(3), ) def get_page(session: ASFSession, url: str, translated_opts: List) -> Response: - + from asf_search.constants.INTERNAL import CMR_TIMEOUT perf = time.time() try: - response = session.post(url=url, data=translated_opts, timeout=INTERNAL.CMR_TIMEOUT) + response = session.post(url=url, data=translated_opts, timeout=CMR_TIMEOUT) response.raise_for_status() except HTTPError as exc: error_message = f'HTTP {response.status_code}: {response.json()["errors"]}' @@ -197,7 +197,7 @@ def get_page(session: ASFSession, url: str, translated_opts: List) -> Response: if 500 <= response.status_code <= 599: raise ASFSearch5xxError(error_message) from exc except ReadTimeout as exc: - raise ASFSearchError(f'Connection Error (Timeout): CMR took too long to respond. Set asf constant "CMR_TIMEOUT" to increase. ({url=}, timeout={INTERNAL.CMR_TIMEOUT})') from exc + raise ASFSearchError(f'Connection Error (Timeout): CMR took too long to respond. Set asf constant "asf_search.constants.INTERNAL.CMR_TIMEOUT" to increase. ({url=}, timeout={CMR_TIMEOUT})') from exc ASF_LOGGER.warning(f"Query Time Elapsed {time.time() - perf}") return response diff --git a/tests/ASFSearchResults/test_ASFSearchResults.py b/tests/ASFSearchResults/test_ASFSearchResults.py index 7f80a124..397bc051 100644 --- a/tests/ASFSearchResults/test_ASFSearchResults.py +++ b/tests/ASFSearchResults/test_ASFSearchResults.py @@ -18,6 +18,8 @@ from asf_search.constants import PLATFORM import re +from asf_search.exceptions import ASFSearchError + # when this replaces SearchAPI change values to cached API_URL = 'https://api.daac.asf.alaska.edu/services/search/param?' 
@@ -198,18 +200,21 @@ def run_test_ASFSearchResults_intersection(wkt: str): # exclude SMAP products platforms = [ - PLATFORM.ALOS, PLATFORM.SENTINEL1, - PLATFORM.SIRC, PLATFORM.UAVSAR ] def overlap_check(s1: BaseGeometry, s2: BaseGeometry): return s1.overlaps(s2) or s1.touches(s2) or s2.distance(s1) <= 0.005 - + asf.constants.INTERNAL.CMR_TIMEOUT = 60 for platform in platforms: - results = asf.geo_search(intersectsWith=wkt, platform=platform, maxResults=250) - + try: + results = asf.geo_search(intersectsWith=wkt, platform=platform, maxResults=250) + except ASFSearchError as exc: + asf.constants.INTERNAL.CMR_TIMEOUT = 30 + raise BaseException(f'Failed to perform intersection test with wkt: {wkt}\nplatform: {platform}.\nOriginal exception: {exc}') + + asf.constants.INTERNAL.CMR_TIMEOUT = 30 for product in results: if shape(product.geometry).is_valid: product_geom_wrapped, product_geom_unwrapped, _ = asf.validate_wkt(shape(product.geometry)) From fde4224c5e0666632db2fccf43ba1da885f19d55 Mon Sep 17 00:00:00 2001 From: kim Date: Fri, 2 Aug 2024 11:53:33 -0800 Subject: [PATCH 27/30] changes _properties_paths back to _base_properties --- asf_search/ASFProduct.py | 17 +------- asf_search/CMR/subquery.py | 22 ---------- asf_search/Products/AIRSARProduct.py | 2 +- asf_search/Products/ALOSProduct.py | 2 +- asf_search/Products/ARIAS1GUNWProduct.py | 2 +- asf_search/Products/ERSProduct.py | 2 +- asf_search/Products/JERSProduct.py | 2 +- asf_search/Products/NISARProduct.py | 2 +- asf_search/Products/OPERAS1Product.py | 2 +- asf_search/Products/RADARSATProduct.py | 2 +- asf_search/Products/S1BurstProduct.py | 2 +- asf_search/Products/S1Product.py | 2 +- asf_search/Products/SEASATProduct.py | 2 +- asf_search/Products/SIRCProduct.py | 2 +- asf_search/Products/SMAPProduct.py | 2 +- asf_search/Products/UAVSARProduct.py | 2 +- asf_search/search/baseline_search.py | 2 +- examples/5-Download.ipynb | 52 ++++++++++++++++++++++-- 18 files changed, 66 insertions(+), 55 deletions(-) diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py index 55f5290a..305352c5 100644 --- a/asf_search/ASFProduct.py +++ b/asf_search/ASFProduct.py @@ -41,7 +41,7 @@ class ASFProduct: def get_classname(cls): return cls.__name__ - _properties_paths = { + _base_properties = { # min viable product 'centerLat': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0], 'cast': try_parse_float}, 'centerLon': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0], 'cast': try_parse_float}, @@ -67,7 +67,7 @@ def get_classname(cls): 'sensor': {'path': [ 'Platforms', 0, 'Instruments', 0, 'ShortName'], }, } """ - _properties_paths dictionary, mapping readable property names to paths and optional type casting + _base_properties dictionary, mapping readable property names to paths and optional type casting entries are organized as such: - `PROPERTY_NAME`: The name the property should be called in `ASFProduct.properties` @@ -282,19 +282,6 @@ def translate_product(self, item: Dict) -> Dict: return {'geometry': geometry, 'properties': properties, 'type': 'Feature'} - # ASFProduct subclasses define extra/override param key + UMM pathing here - @staticmethod - def get_property_paths() -> Dict: - """ - Returns _base_properties of class, subclasses such as `S1Product` (or user provided subclasses) can override this to - define which properties they want in their subclass's properties dict. 
- - (See `S1Product.get_property_paths()` for example of combining _base_properties of multiple classes) - - :returns dictionary, {`PROPERTY_NAME`: {'path': [umm, path, to, value], 'cast (optional)': Callable_to_cast_value}, ...} - """ - return ASFProduct._base_properties - def get_sort_keys(self) -> Tuple[str, str]: """ Returns tuple of primary and secondary date values used for sorting final search results diff --git a/asf_search/CMR/subquery.py b/asf_search/CMR/subquery.py index d11b7c4c..db48c213 100644 --- a/asf_search/CMR/subquery.py +++ b/asf_search/CMR/subquery.py @@ -137,28 +137,6 @@ def format_query_params(params) -> List[List[dict]]: def translate_param(param_name, param_val) -> List[dict]: - # param_list = [] - - # cmr_input_map = field_map - - # param_input_map = cmr_input_map[param_name] - # cmr_param = param_input_map['key'] - # cmr_format_str = param_input_map['fmt'] - - # if not isinstance(param_val, list): - # param_val = [param_val] - - # for l in param_val: - # format_val = l - - # if isinstance(l, list): - # format_val = ','.join([f'{t}' for t in l]) - - # param_list.append({ - # cmr_param: cmr_format_str.format(format_val) - # }) - - # return param_list param_list = [] if not isinstance(param_val, list): diff --git a/asf_search/Products/AIRSARProduct.py b/asf_search/Products/AIRSARProduct.py index aa1e0e8b..0f5e2902 100644 --- a/asf_search/Products/AIRSARProduct.py +++ b/asf_search/Products/AIRSARProduct.py @@ -7,7 +7,7 @@ class AIRSARProduct(ASFProduct): """ ASF Dataset Overview Page: https://asf.alaska.edu/data-sets/sar-data-sets/airsar/ """ - _properties_paths = { + _base_properties = { **ASFProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py index 035b3d1f..7052186f 100644 --- a/asf_search/Products/ALOSProduct.py +++ b/asf_search/Products/ALOSProduct.py @@ -10,7 +10,7 @@ class ALOSProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/ """ - _properties_paths = { + _base_properties = { **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, diff --git a/asf_search/Products/ARIAS1GUNWProduct.py b/asf_search/Products/ARIAS1GUNWProduct.py index a65b6e24..f75ddb9e 100644 --- a/asf_search/Products/ARIAS1GUNWProduct.py +++ b/asf_search/Products/ARIAS1GUNWProduct.py @@ -12,7 +12,7 @@ class ARIAS1GUNWProduct(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/derived-data-sets/sentinel-1-interferograms/ """ - _properties_paths = { + _base_properties = { **S1Product._properties_paths, 'perpendicularBaseline': {'path': ['AdditionalAttributes', ('Name', 'PERPENDICULAR_BASELINE'), 'Values', 0], 'cast': try_parse_float}, 'orbit': {'path': ['OrbitCalculatedSpatialDomains']}, diff --git a/asf_search/Products/ERSProduct.py b/asf_search/Products/ERSProduct.py index 4a53d35c..73aa447e 100644 --- a/asf_search/Products/ERSProduct.py +++ b/asf_search/Products/ERSProduct.py @@ -11,7 +11,7 @@ class ERSProduct(ASFStackableProduct): ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/ ASF ERS-2 Dataset 
Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/ """ - _properties_paths = { + _base_properties = { **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]}, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, diff --git a/asf_search/Products/JERSProduct.py b/asf_search/Products/JERSProduct.py index f829a760..5ea10e19 100644 --- a/asf_search/Products/JERSProduct.py +++ b/asf_search/Products/JERSProduct.py @@ -7,7 +7,7 @@ class JERSProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/jers-1/ """ - _properties_paths = { + _base_properties = { **ASFStackableProduct._properties_paths, 'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]}, 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, diff --git a/asf_search/Products/NISARProduct.py b/asf_search/Products/NISARProduct.py index cd4956e1..30fc97b0 100644 --- a/asf_search/Products/NISARProduct.py +++ b/asf_search/Products/NISARProduct.py @@ -10,7 +10,7 @@ class NISARProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/ """ - _properties_paths = { + _base_properties = { **ASFStackableProduct._properties_paths, 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']} } diff --git a/asf_search/Products/OPERAS1Product.py b/asf_search/Products/OPERAS1Product.py index 0c3c9a6d..91b24a78 100644 --- a/asf_search/Products/OPERAS1Product.py +++ b/asf_search/Products/OPERAS1Product.py @@ -8,7 +8,7 @@ class OPERAS1Product(S1Product): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/opera/ """ - _properties_paths = { + _base_properties = { **S1Product._properties_paths, 'centerLat': {'path': []}, # Opera products lacks these fields 'centerLon': {'path': []}, diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py index 7c933fe6..817465c4 100644 --- a/asf_search/Products/RADARSATProduct.py +++ b/asf_search/Products/RADARSATProduct.py @@ -8,7 +8,7 @@ class RADARSATProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/radarsat-1/ """ - _properties_paths = { + _base_properties = { **ASFStackableProduct._properties_paths, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, diff --git a/asf_search/Products/S1BurstProduct.py b/asf_search/Products/S1BurstProduct.py index 7d35f923..44462544 100644 --- a/asf_search/Products/S1BurstProduct.py +++ b/asf_search/Products/S1BurstProduct.py @@ -17,7 +17,7 @@ class S1BurstProduct(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/ """ - _properties_paths = { + _base_properties = { **S1Product._properties_paths, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]}, 'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int}, diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py index c6f24ff5..16b24d7c 100644 --- a/asf_search/Products/S1Product.py +++ b/asf_search/Products/S1Product.py @@ -15,7 +15,7 @@ class S1Product(ASFStackableProduct): ASF Dataset Overview Page: 
https://asf.alaska.edu/datasets/daac/sentinel-1/ """ - _properties_paths = { + _base_properties = { **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME) 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, diff --git a/asf_search/Products/SEASATProduct.py b/asf_search/Products/SEASATProduct.py index 1158fcf1..90c760e8 100644 --- a/asf_search/Products/SEASATProduct.py +++ b/asf_search/Products/SEASATProduct.py @@ -7,7 +7,7 @@ class SEASATProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/seasat/ """ - _properties_paths = { + _base_properties = { **ASFProduct._properties_paths, 'bytes': {'path': [ 'AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, diff --git a/asf_search/Products/SIRCProduct.py b/asf_search/Products/SIRCProduct.py index e2b05df3..bdd41821 100644 --- a/asf_search/Products/SIRCProduct.py +++ b/asf_search/Products/SIRCProduct.py @@ -5,7 +5,7 @@ class SIRCProduct(ASFProduct): """ Dataset Documentation Page: https://eospso.nasa.gov/missions/spaceborne-imaging-radar-c """ - _properties_paths = { + _base_properties = { **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, diff --git a/asf_search/Products/SMAPProduct.py b/asf_search/Products/SMAPProduct.py index a2750032..1e4834dd 100644 --- a/asf_search/Products/SMAPProduct.py +++ b/asf_search/Products/SMAPProduct.py @@ -7,7 +7,7 @@ class SMAPProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/ """ - _properties_paths = { + _base_properties = { **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, diff --git a/asf_search/Products/UAVSARProduct.py b/asf_search/Products/UAVSARProduct.py index f33a39dc..280cf719 100644 --- a/asf_search/Products/UAVSARProduct.py +++ b/asf_search/Products/UAVSARProduct.py @@ -7,7 +7,7 @@ class UAVSARProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/uavsar/ """ - _properties_paths = { + _base_properties = { **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py index c8cfc926..9ed47163 100644 --- a/asf_search/search/baseline_search.py +++ b/asf_search/search/baseline_search.py @@ -100,7 +100,7 @@ def _cast_to_subclass(product: ASFProduct, subclass: Type[ASFProduct]) -> ASFPro example: ``` class MyCustomClass(ASFProduct): - _properties_paths = { + _base_properties = { **ASFProduct._properties_paths, 'some_unique_property': {'path': ['AdditionalAttributes', 'UNIQUE_PROPERTY', ...]} } diff --git a/examples/5-Download.ipynb b/examples/5-Download.ipynb index 058b068c..43fb651e 100644 --- a/examples/5-Download.ipynb +++ b/examples/5-Download.ipynb @@ -20,7 +20,7 @@ }, { "cell_type": 
"code", - "execution_count": null, + "execution_count": 2, "id": "db06fa80-4ac3-40b5-9787-256b422d49e6", "metadata": { "tags": [] @@ -52,7 +52,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "38a2c671-0789-4e7c-b758-5b48745b2877", "metadata": {}, "outputs": [], @@ -322,6 +322,52 @@ "listdir('./downloads3')" ] }, + { + "cell_type": "markdown", + "id": "30760f6f", + "metadata": {}, + "source": [ + "***\n", + "## S3 URIs\n", + "Some products have S3 URIs available (SENTINEL-1, OPERA, and NISAR)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "dd4a81ed", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['s3://asf-cumulus-prod-opera-browse/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1_BROWSE.png.md5',\n", + " 's3://asf-cumulus-prod-opera-browse/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1_BROWSE_low-res.png.md5',\n", + " 's3://asf-cumulus-prod-opera-browse/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1_BROWSE_thumbnail.png.md5',\n", + " 's3://asf-cumulus-prod-opera-products/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1.h5',\n", + " 's3://asf-cumulus-prod-opera-products/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1.h5.md5',\n", + " 's3://asf-cumulus-prod-opera-products/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1.iso.xml',\n", + " 's3://asf-cumulus-prod-opera-products/OPERA_L2_CSLC-S1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1/OPERA_L2_CSLC-S1_T113-241605-IW3_20240610T110743Z_20240611T073356Z_S1A_VV_v1.1.iso.xml.md5']" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "opera_product = asf.search(dataset=asf.DATASET.OPERA_S1, maxResults=1)[0]\n", + "opera_product.properties['s3Urls']" + ] + }, + { + "cell_type": "markdown", + "id": "159b5eb8", + "metadata": {}, + "source": [ + "From there authorized users can use their prefered method for authentication and downloading s3 objects." 
+ ] + }, { "cell_type": "markdown", "id": "8aed3b7c-a557-4cbb-878e-aa9fe8330646", @@ -394,7 +440,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.7" + "version": "3.11.5" }, "toc-autonumbering": false, "toc-showtags": false From 9f9e30054f56c15704bfaa30d6377394bd810a77 Mon Sep 17 00:00:00 2001 From: kim Date: Fri, 2 Aug 2024 11:57:46 -0800 Subject: [PATCH 28/30] fixes indentation on _base_properties --- asf_search/ASFProduct.py | 6 +++--- asf_search/Products/AIRSARProduct.py | 2 +- asf_search/Products/ALOSProduct.py | 2 +- asf_search/Products/ARIAS1GUNWProduct.py | 2 +- asf_search/Products/ERSProduct.py | 2 +- asf_search/Products/JERSProduct.py | 2 +- asf_search/Products/NISARProduct.py | 2 +- asf_search/Products/OPERAS1Product.py | 2 +- asf_search/Products/RADARSATProduct.py | 2 +- asf_search/Products/S1BurstProduct.py | 2 +- asf_search/Products/S1Product.py | 2 +- asf_search/Products/SEASATProduct.py | 2 +- asf_search/Products/SIRCProduct.py | 2 +- asf_search/Products/SMAPProduct.py | 2 +- asf_search/Products/UAVSARProduct.py | 2 +- asf_search/search/baseline_search.py | 2 +- 16 files changed, 18 insertions(+), 18 deletions(-) diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py index 305352c5..97df8e48 100644 --- a/asf_search/ASFProduct.py +++ b/asf_search/ASFProduct.py @@ -41,7 +41,7 @@ class ASFProduct: def get_classname(cls): return cls.__name__ - _base_properties = { + _base_properties = { # min viable product 'centerLat': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0], 'cast': try_parse_float}, 'centerLon': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0], 'cast': try_parse_float}, @@ -67,7 +67,7 @@ def get_classname(cls): 'sensor': {'path': [ 'Platforms', 0, 'Instruments', 0, 'ShortName'], }, } """ - _base_properties dictionary, mapping readable property names to paths and optional type casting + _base_properties dictionary, mapping readable property names to paths and optional type casting entries are organized as such: - `PROPERTY_NAME`: The name the property should be called in `ASFProduct.properties` @@ -169,7 +169,7 @@ def stack( :param opts: An ASFSearchOptions object describing the search parameters to be used. Search parameters specified outside this object will override in event of a conflict. :param ASFProductSubclass: An ASFProduct subclass constructor. - + :return: ASFSearchResults containing the stack, with the addition of baseline values (temporal, perpendicular) attached to each ASFProduct. 
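The `ASFProductSubclass` parameter documented in the `stack()` docstring above is what lets a baseline stack come back as user-defined product objects. A short sketch of the call, assuming a trivial subclass in the style of the `_cast_to_subclass` docstring further down; the search keywords and the `pgeVersion` path are taken from the diffs in this series, but the subclass itself is hypothetical.

```python
import asf_search as asf

class MyCustomClass(asf.ASFProduct):
    # Hypothetical subclass: inherit every mapped property and add one more,
    # reusing the pgeVersion path shown in the NISARProduct diff.
    _base_properties = {
        **asf.ASFProduct._base_properties,
        'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']},
    }

# Build a stack from one Sentinel-1 SLC and re-cast each result to the subclass.
reference = asf.search(platform=asf.PLATFORM.SENTINEL1, processingLevel='SLC', maxResults=1)[0]
stack = reference.stack(ASFProductSubclass=MyCustomClass)
```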
""" from .search.baseline_search import stack_from_product diff --git a/asf_search/Products/AIRSARProduct.py b/asf_search/Products/AIRSARProduct.py index 0f5e2902..4da1ab0f 100644 --- a/asf_search/Products/AIRSARProduct.py +++ b/asf_search/Products/AIRSARProduct.py @@ -7,7 +7,7 @@ class AIRSARProduct(ASFProduct): """ ASF Dataset Overview Page: https://asf.alaska.edu/data-sets/sar-data-sets/airsar/ """ - _base_properties = { + _base_properties = { **ASFProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py index 7052186f..47748a79 100644 --- a/asf_search/Products/ALOSProduct.py +++ b/asf_search/Products/ALOSProduct.py @@ -10,7 +10,7 @@ class ALOSProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/ """ - _base_properties = { + _base_properties = { **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, diff --git a/asf_search/Products/ARIAS1GUNWProduct.py b/asf_search/Products/ARIAS1GUNWProduct.py index f75ddb9e..bbd84146 100644 --- a/asf_search/Products/ARIAS1GUNWProduct.py +++ b/asf_search/Products/ARIAS1GUNWProduct.py @@ -12,7 +12,7 @@ class ARIAS1GUNWProduct(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/derived-data-sets/sentinel-1-interferograms/ """ - _base_properties = { + _base_properties = { **S1Product._properties_paths, 'perpendicularBaseline': {'path': ['AdditionalAttributes', ('Name', 'PERPENDICULAR_BASELINE'), 'Values', 0], 'cast': try_parse_float}, 'orbit': {'path': ['OrbitCalculatedSpatialDomains']}, diff --git a/asf_search/Products/ERSProduct.py b/asf_search/Products/ERSProduct.py index 73aa447e..2e0a54de 100644 --- a/asf_search/Products/ERSProduct.py +++ b/asf_search/Products/ERSProduct.py @@ -11,7 +11,7 @@ class ERSProduct(ASFStackableProduct): ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/ ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/ """ - _base_properties = { + _base_properties = { **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]}, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, diff --git a/asf_search/Products/JERSProduct.py b/asf_search/Products/JERSProduct.py index 5ea10e19..153fcda4 100644 --- a/asf_search/Products/JERSProduct.py +++ b/asf_search/Products/JERSProduct.py @@ -7,7 +7,7 @@ class JERSProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/jers-1/ """ - _base_properties = { + _base_properties = { **ASFStackableProduct._properties_paths, 'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]}, 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, diff --git a/asf_search/Products/NISARProduct.py b/asf_search/Products/NISARProduct.py index 30fc97b0..c409cb67 100644 --- a/asf_search/Products/NISARProduct.py +++ b/asf_search/Products/NISARProduct.py @@ -10,7 +10,7 @@ class 
NISARProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/ """ - _base_properties = { + _base_properties = { **ASFStackableProduct._properties_paths, 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']} } diff --git a/asf_search/Products/OPERAS1Product.py b/asf_search/Products/OPERAS1Product.py index 91b24a78..7b03f095 100644 --- a/asf_search/Products/OPERAS1Product.py +++ b/asf_search/Products/OPERAS1Product.py @@ -8,7 +8,7 @@ class OPERAS1Product(S1Product): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/opera/ """ - _base_properties = { + _base_properties = { **S1Product._properties_paths, 'centerLat': {'path': []}, # Opera products lacks these fields 'centerLon': {'path': []}, diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py index 817465c4..2edeec41 100644 --- a/asf_search/Products/RADARSATProduct.py +++ b/asf_search/Products/RADARSATProduct.py @@ -8,7 +8,7 @@ class RADARSATProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/radarsat-1/ """ - _base_properties = { + _base_properties = { **ASFStackableProduct._properties_paths, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, diff --git a/asf_search/Products/S1BurstProduct.py b/asf_search/Products/S1BurstProduct.py index 44462544..671ca6c9 100644 --- a/asf_search/Products/S1BurstProduct.py +++ b/asf_search/Products/S1BurstProduct.py @@ -17,7 +17,7 @@ class S1BurstProduct(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/ """ - _base_properties = { + _base_properties = { **S1Product._properties_paths, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]}, 'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int}, diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py index 16b24d7c..d400444b 100644 --- a/asf_search/Products/S1Product.py +++ b/asf_search/Products/S1Product.py @@ -15,7 +15,7 @@ class S1Product(ASFStackableProduct): ASF Dataset Overview Page: https://asf.alaska.edu/datasets/daac/sentinel-1/ """ - _base_properties = { + _base_properties = { **ASFStackableProduct._properties_paths, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME) 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, diff --git a/asf_search/Products/SEASATProduct.py b/asf_search/Products/SEASATProduct.py index 90c760e8..eae227f5 100644 --- a/asf_search/Products/SEASATProduct.py +++ b/asf_search/Products/SEASATProduct.py @@ -7,7 +7,7 @@ class SEASATProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/seasat/ """ - _base_properties = { + _base_properties = { **ASFProduct._properties_paths, 'bytes': {'path': [ 'AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, diff --git a/asf_search/Products/SIRCProduct.py b/asf_search/Products/SIRCProduct.py index bdd41821..77de2c82 100644 --- a/asf_search/Products/SIRCProduct.py +++ 
b/asf_search/Products/SIRCProduct.py @@ -5,7 +5,7 @@ class SIRCProduct(ASFProduct): """ Dataset Documentation Page: https://eospso.nasa.gov/missions/spaceborne-imaging-radar-c """ - _base_properties = { + _base_properties = { **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, diff --git a/asf_search/Products/SMAPProduct.py b/asf_search/Products/SMAPProduct.py index 1e4834dd..b47a8c81 100644 --- a/asf_search/Products/SMAPProduct.py +++ b/asf_search/Products/SMAPProduct.py @@ -7,7 +7,7 @@ class SMAPProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/ """ - _base_properties = { + _base_properties = { **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, diff --git a/asf_search/Products/UAVSARProduct.py b/asf_search/Products/UAVSARProduct.py index 280cf719..a335d1a9 100644 --- a/asf_search/Products/UAVSARProduct.py +++ b/asf_search/Products/UAVSARProduct.py @@ -7,7 +7,7 @@ class UAVSARProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/uavsar/ """ - _base_properties = { + _base_properties = { **ASFProduct._properties_paths, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py index 9ed47163..b48751ab 100644 --- a/asf_search/search/baseline_search.py +++ b/asf_search/search/baseline_search.py @@ -100,7 +100,7 @@ def _cast_to_subclass(product: ASFProduct, subclass: Type[ASFProduct]) -> ASFPro example: ``` class MyCustomClass(ASFProduct): - _base_properties = { + _base_properties = { **ASFProduct._properties_paths, 'some_unique_property': {'path': ['AdditionalAttributes', 'UNIQUE_PROPERTY', ...]} } From 76769494ce04cd9955cac5ce28a5fa3741d59ebb Mon Sep 17 00:00:00 2001 From: kim Date: Fri, 2 Aug 2024 12:00:03 -0800 Subject: [PATCH 29/30] fixes old reference to properties paths --- asf_search/ASFProduct.py | 2 +- asf_search/Products/AIRSARProduct.py | 2 +- asf_search/Products/ALOSProduct.py | 2 +- asf_search/Products/ARIAS1GUNWProduct.py | 2 +- asf_search/Products/ERSProduct.py | 2 +- asf_search/Products/JERSProduct.py | 2 +- asf_search/Products/NISARProduct.py | 2 +- asf_search/Products/OPERAS1Product.py | 2 +- asf_search/Products/RADARSATProduct.py | 2 +- asf_search/Products/S1BurstProduct.py | 2 +- asf_search/Products/S1Product.py | 2 +- asf_search/Products/SEASATProduct.py | 2 +- asf_search/Products/SIRCProduct.py | 2 +- asf_search/Products/SMAPProduct.py | 2 +- asf_search/Products/UAVSARProduct.py | 2 +- asf_search/search/baseline_search.py | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/asf_search/ASFProduct.py b/asf_search/ASFProduct.py index 97df8e48..230d90f6 100644 --- a/asf_search/ASFProduct.py +++ b/asf_search/ASFProduct.py @@ -265,7 +265,7 @@ def translate_product(self, item: Dict) -> Dict: properties = { prop: self._read_umm_property(umm, umm_mapping) - for prop, umm_mapping in self._properties_paths.items() + for prop, umm_mapping in self._base_properties.items() } if properties.get('url') is not None: diff --git 
a/asf_search/Products/AIRSARProduct.py b/asf_search/Products/AIRSARProduct.py index 4da1ab0f..6c8bc914 100644 --- a/asf_search/Products/AIRSARProduct.py +++ b/asf_search/Products/AIRSARProduct.py @@ -8,7 +8,7 @@ class AIRSARProduct(ASFProduct): ASF Dataset Overview Page: https://asf.alaska.edu/data-sets/sar-data-sets/airsar/ """ _base_properties = { - **ASFProduct._properties_paths, + **ASFProduct._base_properties, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, diff --git a/asf_search/Products/ALOSProduct.py b/asf_search/Products/ALOSProduct.py index 47748a79..92df7819 100644 --- a/asf_search/Products/ALOSProduct.py +++ b/asf_search/Products/ALOSProduct.py @@ -11,7 +11,7 @@ class ALOSProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/ """ _base_properties = { - **ASFStackableProduct._properties_paths, + **ASFStackableProduct._base_properties, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, 'offNadirAngle': {'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0], 'cast': try_parse_float}, diff --git a/asf_search/Products/ARIAS1GUNWProduct.py b/asf_search/Products/ARIAS1GUNWProduct.py index bbd84146..91a87c95 100644 --- a/asf_search/Products/ARIAS1GUNWProduct.py +++ b/asf_search/Products/ARIAS1GUNWProduct.py @@ -13,7 +13,7 @@ class ARIAS1GUNWProduct(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/derived-data-sets/sentinel-1-interferograms/ """ _base_properties = { - **S1Product._properties_paths, + **S1Product._base_properties, 'perpendicularBaseline': {'path': ['AdditionalAttributes', ('Name', 'PERPENDICULAR_BASELINE'), 'Values', 0], 'cast': try_parse_float}, 'orbit': {'path': ['OrbitCalculatedSpatialDomains']}, 'inputGranules': {'path': ['InputGranules']}, diff --git a/asf_search/Products/ERSProduct.py b/asf_search/Products/ERSProduct.py index 2e0a54de..8b6961aa 100644 --- a/asf_search/Products/ERSProduct.py +++ b/asf_search/Products/ERSProduct.py @@ -12,7 +12,7 @@ class ERSProduct(ASFStackableProduct): ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/ """ _base_properties = { - **ASFStackableProduct._properties_paths, + **ASFStackableProduct._base_properties, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]}, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, 'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0]}, diff --git a/asf_search/Products/JERSProduct.py b/asf_search/Products/JERSProduct.py index 153fcda4..a70e1050 100644 --- a/asf_search/Products/JERSProduct.py +++ b/asf_search/Products/JERSProduct.py @@ -8,7 +8,7 @@ class JERSProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/jers-1/ """ _base_properties = { - **ASFStackableProduct._properties_paths, + **ASFStackableProduct._base_properties, 'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]}, 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 
'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, diff --git a/asf_search/Products/NISARProduct.py b/asf_search/Products/NISARProduct.py index c409cb67..e66ad77d 100644 --- a/asf_search/Products/NISARProduct.py +++ b/asf_search/Products/NISARProduct.py @@ -11,7 +11,7 @@ class NISARProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/ """ _base_properties = { - **ASFStackableProduct._properties_paths, + **ASFStackableProduct._base_properties, 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']} } diff --git a/asf_search/Products/OPERAS1Product.py b/asf_search/Products/OPERAS1Product.py index 7b03f095..9ee2b45e 100644 --- a/asf_search/Products/OPERAS1Product.py +++ b/asf_search/Products/OPERAS1Product.py @@ -9,7 +9,7 @@ class OPERAS1Product(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/opera/ """ _base_properties = { - **S1Product._properties_paths, + **S1Product._base_properties, 'centerLat': {'path': []}, # Opera products lacks these fields 'centerLon': {'path': []}, 'frameNumber': {'path': []}, diff --git a/asf_search/Products/RADARSATProduct.py b/asf_search/Products/RADARSATProduct.py index 2edeec41..8dba91e8 100644 --- a/asf_search/Products/RADARSATProduct.py +++ b/asf_search/Products/RADARSATProduct.py @@ -9,7 +9,7 @@ class RADARSATProduct(ASFStackableProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/radarsat-1/ """ _base_properties = { - **ASFStackableProduct._properties_paths, + **ASFStackableProduct._base_properties, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, diff --git a/asf_search/Products/S1BurstProduct.py b/asf_search/Products/S1BurstProduct.py index 671ca6c9..986a800a 100644 --- a/asf_search/Products/S1BurstProduct.py +++ b/asf_search/Products/S1BurstProduct.py @@ -18,7 +18,7 @@ class S1BurstProduct(S1Product): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/ """ _base_properties = { - **S1Product._properties_paths, + **S1Product._base_properties, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]}, 'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int}, 'relativeBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_RELATIVE'), 'Values', 0], 'cast': try_parse_int}, diff --git a/asf_search/Products/S1Product.py b/asf_search/Products/S1Product.py index d400444b..6165a4cc 100644 --- a/asf_search/Products/S1Product.py +++ b/asf_search/Products/S1Product.py @@ -16,7 +16,7 @@ class S1Product(ASFStackableProduct): """ _base_properties = { - **ASFStackableProduct._properties_paths, + **ASFStackableProduct._base_properties, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME) 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, diff --git a/asf_search/Products/SEASATProduct.py b/asf_search/Products/SEASATProduct.py index eae227f5..6cbe3479 100644 --- a/asf_search/Products/SEASATProduct.py +++ 
b/asf_search/Products/SEASATProduct.py @@ -8,7 +8,7 @@ class SEASATProduct(ASFProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/seasat/ """ _base_properties = { - **ASFProduct._properties_paths, + **ASFProduct._base_properties, 'bytes': {'path': [ 'AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, diff --git a/asf_search/Products/SIRCProduct.py b/asf_search/Products/SIRCProduct.py index 77de2c82..812c2bfa 100644 --- a/asf_search/Products/SIRCProduct.py +++ b/asf_search/Products/SIRCProduct.py @@ -6,7 +6,7 @@ class SIRCProduct(ASFProduct): Dataset Documentation Page: https://eospso.nasa.gov/missions/spaceborne-imaging-radar-c """ _base_properties = { - **ASFProduct._properties_paths, + **ASFProduct._base_properties, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion'] }, diff --git a/asf_search/Products/SMAPProduct.py b/asf_search/Products/SMAPProduct.py index b47a8c81..d852c7f8 100644 --- a/asf_search/Products/SMAPProduct.py +++ b/asf_search/Products/SMAPProduct.py @@ -8,7 +8,7 @@ class SMAPProduct(ASFProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/ """ _base_properties = { - **ASFProduct._properties_paths, + **ASFProduct._base_properties, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, diff --git a/asf_search/Products/UAVSARProduct.py b/asf_search/Products/UAVSARProduct.py index a335d1a9..edf35f29 100644 --- a/asf_search/Products/UAVSARProduct.py +++ b/asf_search/Products/UAVSARProduct.py @@ -8,7 +8,7 @@ class UAVSARProduct(ASFProduct): ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/uavsar/ """ _base_properties = { - **ASFProduct._properties_paths, + **ASFProduct._base_properties, 'groupID': {'path': [ 'AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': [ 'AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': [ 'AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, diff --git a/asf_search/search/baseline_search.py b/asf_search/search/baseline_search.py index b48751ab..b50b15ae 100644 --- a/asf_search/search/baseline_search.py +++ b/asf_search/search/baseline_search.py @@ -101,7 +101,7 @@ def _cast_to_subclass(product: ASFProduct, subclass: Type[ASFProduct]) -> ASFPro ``` class MyCustomClass(ASFProduct): _base_properties = { - **ASFProduct._properties_paths, + **ASFProduct._base_properties, 'some_unique_property': {'path': ['AdditionalAttributes', 'UNIQUE_PROPERTY', ...]} } From 651f4b461738943002693558cb9d9636632d5a6d Mon Sep 17 00:00:00 2001 From: kim Date: Fri, 2 Aug 2024 12:19:54 -0800 Subject: [PATCH 30/30] removes PR trigger for pytest workflow, update example --- .github/workflows/run-pytest.yml | 2 +- ...vanced-Custom-ASFProduct-Subclassing.ipynb | 21 ++++++------------- 2 files changed, 7 insertions(+), 16 deletions(-) diff --git a/.github/workflows/run-pytest.yml b/.github/workflows/run-pytest.yml 
index 855336a6..56d759fb 100644 --- a/.github/workflows/run-pytest.yml +++ b/.github/workflows/run-pytest.yml @@ -1,6 +1,6 @@ name: tests -on: [pull_request, push] +on: [push] jobs: run-tests: diff --git a/examples/Advanced-Custom-ASFProduct-Subclassing.ipynb b/examples/Advanced-Custom-ASFProduct-Subclassing.ipynb index 804f1667..39cec733 100644 --- a/examples/Advanced-Custom-ASFProduct-Subclassing.ipynb +++ b/examples/Advanced-Custom-ASFProduct-Subclassing.ipynb @@ -15,14 +15,13 @@ "- `get_stack_opts()` (returns None in `ASFProduct`, implemented by `ASFStackableProduct` subclass and its subclasses)\n", "- `centroid()`\n", "- `remotezip()` (requires asf-search's optional dependency be installed)\n", - "- `get_property_paths()` (gets product's keywords and their paths in umm dictionary)\n", "- `translate_product()` (reads properties from umm, populates `properties` with associated keyword)\n", "- `get_sort_keys()`\n", "- `umm_get()`\n", "\n", "Key Properties:\n", "- `properties`\n", - "- `_base_properties` (What `get_property_paths()` uses to find values in umm json `properties`)\n", + "- `_base_properties` (maps `properties` keys to values in umm json)\n", "- `umm` (The product's umm JSON from CMR)\n", "- `metadata` (The product's metadata JSON from CMR)" ] @@ -196,12 +195,13 @@ " self.timestamp = datetime.now()\n", "\n", " # _base_properties is a special dict of ASFProduct that maps keywords to granule UMM json\n", - " # defining properties and their paths here in conjunction with `get_property_paths()` \n", - " # will let you easily access them in the product's `properties` dictionary\n", + " # defining properties and their paths here will let you\n", + " # easily access them in the product's `properties` dictionary\n", " # see `ASFProduct.umm_get()` for explanation of pathing\n", " _base_properties = {\n", " # Most product types use `CENTER_ESA_FRAME` as the value for `frameNumber` (unlike S1 and ALOS, which use `FRAME_NUMBER`), \n", " # this creates a new `esaFrame` property so we have that value too\n", + " **asf.S1Product._base_properties,\n", " 'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME)\n", " }\n", "\n", @@ -234,16 +234,7 @@ " output['properties']['timestamp'] = str(self.timestamp)\n", " output['properties']['ASFSearchVersion'] = asf.__version__\n", " return output\n", - " \n", - " # This method is used internally by `ASFProduct.translate_product()` \n", - " # to traverse the granule UMM for each property's corresponding values\n", - " @staticmethod\n", - " def get_property_paths() -> dict:\n", - " return {\n", - " **asf.S1Product.get_property_paths(),\n", - " **MyCustomS1Subclass._base_properties\n", - " }\n", - " \n", + "\n", " # ASFProduct.stack() normally stacks the current product\n", " # in this version we search for every SLC-BURST product that\n", " # overlaps the given area with the same source scene, \n", @@ -367,7 +358,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.11.5" } }, "nbformat": 4,
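Taken together, patches 27 through 30 finish the move to a single merged `_base_properties` per class and drop the old `get_property_paths()` hook. A small sanity-check sketch of the end state, using only class attributes that appear in the diffs above; the specific keys checked are examples, not an exhaustive list.

```python
import asf_search as asf

# Keys mapped on the base class and on S1Product itself should all be present
# in the one merged dict that translate_product() now iterates.
for key in ('centerLat', 'frameNumber', 'groupID', 'md5sum'):
    assert key in asf.S1Product._base_properties, key

# The get_property_paths() hook removed in patch 27 is gone from ASFProduct.
assert not hasattr(asf.ASFProduct, 'get_property_paths')
```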