diff --git a/CHANGELOG.md b/CHANGELOG.md
index 818caee5..133bfd5d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [2.1.0]
+### Added
+- The ability for the `insar_tops_burst` workflow to support processing multiple bursts as one SLC.
+
+### Changed
+- The interface for `insar_tops_burst` so that it takes `--reference` and `--secondary` granule lists. The positional `granules` argument is now optional and deprecated.
+- Moved HyP3 product packaging functionality out of `insar_tops_burst.py` and to a new `packaging.py` so that both `insar_tops` and `insar_tops_burst` can use it.
+
## [2.0.0]
### Changed
- Orbit files are now retrieved using the [s1-orbits](https://github.com/ASFHyP3/sentinel1-orbits-py) library.
diff --git a/README.md b/README.md
index 276c86df..8f629431 100644
--- a/README.md
+++ b/README.md
@@ -8,22 +8,32 @@ The HyP3-ISCE2 plugin provides a set of workflows to process SAR satellite data
## Usage
The HyP3-ISCE2 plugin provides a set of workflows (accessible directly in Python or via a CLI) that can be used to process SAR data using ISCE2. The workflows currently included in this plugin are:
-- `insar_tops`: A workflow for creating full-SLC Sentinel-1 geocoded unwrapped interferogram using ISCE2's TOPS processing workflow
-- `insar_tops_burst`: A workflow for creating single-burst Sentinel-1 geocoded unwrapped interferogram using ISCE2's TOPS processing workflow
+- `insar_stripmap`: A workflow for creating an ALOS-1 geocoded unwrapped interferogram using ISCE2's Stripmap processing workflow
+- `insar_tops`: A workflow for creating a full-SLC Sentinel-1 geocoded unwrapped interferogram using ISCE2's TOPS processing workflow
+- `insar_tops_burst`: A workflow for creating a burst-based Sentinel-1 geocoded unwrapped interferogram using ISCE2's TOPS processing workflow
---
To run a workflow, simply run `python -m hyp3_isce2 ++process [WORKFLOW_NAME] [WORKFLOW_ARGS]`. For example, to run the `insar_tops_burst` workflow:
```
python -m hyp3_isce2 ++process insar_tops_burst \
- S1_136231_IW2_20200604T022312_VV_7C85-BURST \
- S1_136231_IW2_20200616T022313_VV_5D11-BURST \
+ --reference S1_136231_IW2_20200604T022312_VV_7C85-BURST \
+ --secondary S1_136231_IW2_20200616T022313_VV_5D11-BURST \
--looks 20x4 \
--apply-water-mask True
```
-This command will create a Sentinel-1 interferogram that contains a deformation signal related to a
-2020 Iranian earthquake.
+and, for multiple burst pairs:
+
+```
+python -m hyp3_isce2 ++process insar_tops_burst \
+ --reference S1_136231_IW2_20200604T022312_VV_7C85-BURST S1_136232_IW2_20200604T022315_VV_7C85-BURST \
+ --secondary S1_136231_IW2_20200616T022313_VV_5D11-BURST S1_136232_IW2_20200616T022316_VV_5D11-BURST \
+ --looks 20x4 \
+ --apply-water-mask True
+```
+
+These commands will both create a Sentinel-1 interferogram that contains a deformation signal related to a 2020 Iranian earthquake.
### Product Merging Utility Usage
**This feature is under active development and is subject to change!**
diff --git a/environment.yml b/environment.yml
index 46ba920b..ed4fda34 100644
--- a/environment.yml
+++ b/environment.yml
@@ -14,6 +14,7 @@ dependencies:
- asf_search>=6.4.0
- gdal
- opencv
+ - burst2safe
# For packaging, and testing
- flake8
- flake8-import-order
diff --git a/images/coverage.svg b/images/coverage.svg
index 6c15cace..ffd257bd 100644
--- a/images/coverage.svg
+++ b/images/coverage.svg
@@ -9,13 +9,13 @@
-
+
coverage
coverage
- 75%
- 75%
+ 71%
+ 71%
diff --git a/pyproject.toml b/pyproject.toml
index 7533f21e..1a573a03 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,6 +29,7 @@ dependencies = [
"gdal",
"hyp3lib>=3,<4",
"s1_orbits",
+ "burst2safe"
# Conda-forge only dependencies are listed below
# "opencv",
# "isce2>=2.6.3",
diff --git a/src/hyp3_isce2/burst.py b/src/hyp3_isce2/burst.py
index 2ab7a3cd..adec696f 100644
--- a/src/hyp3_isce2/burst.py
+++ b/src/hyp3_isce2/burst.py
@@ -7,8 +7,7 @@
from dataclasses import dataclass
from datetime import datetime, timedelta
from pathlib import Path
-from secrets import token_hex
-from typing import Iterator, List, Optional, Tuple, Union
+from typing import Iterable, Iterator, List, Optional, Tuple, Union
import asf_search
import numpy as np
@@ -345,45 +344,6 @@ def download_bursts(param_list: Iterator[BurstParams]) -> List[BurstMetadata]:
return bursts
-def get_product_name(reference_scene: str, secondary_scene: str, pixel_spacing: int) -> str:
- """Get the name of the interferogram product.
-
- Args:
- reference_scene: The reference burst name.
- secondary_scene: The secondary burst name.
- pixel_spacing: The spacing of the pixels in the output image.
-
- Returns:
- The name of the interferogram product.
- """
-
- reference_split = reference_scene.split('_')
- secondary_split = secondary_scene.split('_')
-
- platform = reference_split[0]
- burst_id = reference_split[1]
- image_plus_swath = reference_split[2]
- reference_date = reference_split[3][0:8]
- secondary_date = secondary_split[3][0:8]
- polarization = reference_split[4]
- product_type = 'INT'
- pixel_spacing = str(int(pixel_spacing))
- product_id = token_hex(2).upper()
-
- return '_'.join(
- [
- platform,
- burst_id,
- image_plus_swath,
- reference_date,
- secondary_date,
- polarization,
- product_type + pixel_spacing,
- product_id,
- ]
- )
-
-
def get_burst_params(scene_name: str) -> BurstParams:
results = asf_search.search(product_list=[scene_name])
@@ -400,35 +360,47 @@ def get_burst_params(scene_name: str) -> BurstParams:
)
-def validate_bursts(reference_scene: str, secondary_scene: str) -> None:
+def validate_bursts(reference: Union[str, Iterable[str]], secondary: Union[str, Iterable[str]]) -> None:
"""Check whether the reference and secondary bursts are valid.
Args:
- reference_scene: The reference burst name.
- secondary_scene: The secondary burst name.
-
- Returns:
- None
+ reference: Reference granule(s)
+ secondary: Secondary granule(s)
"""
- ref_split = reference_scene.split('_')
- sec_split = secondary_scene.split('_')
+ if isinstance(reference, str):
+ reference = [reference]
+ if isinstance(secondary, str):
+ secondary = [secondary]
+
+ if len(reference) < 1 or len(secondary) < 1:
+ raise ValueError('Must include at least 1 reference and 1 secondary burst')
+ if len(reference) != len(secondary):
+ raise ValueError('Must have the same number of reference and secondary bursts')
+
+ ref_num_swath_pol = sorted(g.split('_')[1] + '_' + g.split('_')[2] + '_' + g.split('_')[4] for g in reference)
+ sec_num_swath_pol = sorted(g.split('_')[1] + '_' + g.split('_')[2] + '_' + g.split('_')[4] for g in secondary)
+ if ref_num_swath_pol != sec_num_swath_pol:
+ msg = 'The reference and secondary burst ID sets do not match.\n'
+ msg += f' Reference IDs: {ref_num_swath_pol}\n'
+ msg += f' Secondary IDs: {sec_num_swath_pol}'
+ raise ValueError(msg)
+
+ pols = list(set(g.split('_')[4] for g in reference + secondary))
- ref_burst_id = ref_split[1]
- sec_burst_id = sec_split[1]
+ if len(pols) > 1:
+ raise ValueError(f'All bursts must have a single polarization. Polarizations present: {" ".join(pols)}')
- ref_polarization = ref_split[4]
- sec_polarization = sec_split[4]
+ if pols[0] not in ['VV', 'HH']:
+ raise ValueError(f'{pols[0]} polarization is not currently supported, only VV and HH.')
- if ref_burst_id != sec_burst_id:
- raise ValueError(f'The reference and secondary burst IDs are not the same: {ref_burst_id} and {sec_burst_id}.')
+ ref_dates = list(set(g.split('_')[3][:8] for g in reference))
+ sec_dates = list(set(g.split('_')[3][:8] for g in secondary))
- if ref_polarization != sec_polarization:
- raise ValueError(
- f'The reference and secondary polarizations are not the same: {ref_polarization} and {sec_polarization}.'
- )
+ if len(ref_dates) > 1 or len(sec_dates) > 1:
+ raise ValueError('Reference granules must be from one date and secondary granules must be another.')
- if ref_polarization != 'VV' and ref_polarization != 'HH':
- raise ValueError(f'{ref_polarization} polarization is not currently supported, only VV and HH.')
+ if ref_dates[0] >= sec_dates[0]:
+ raise ValueError('Reference granules must be older than secondary granules.')
def load_burst_position(swath_xml_path: str, burst_number: int) -> BurstPosition:
@@ -572,7 +544,7 @@ def safely_multilook(
if subset_to_valid:
last_line = position.first_valid_line + position.n_valid_lines
last_sample = position.first_valid_sample + position.n_valid_samples
- mask[position.first_valid_line: last_line, position.first_valid_sample: last_sample] = identity_value
+ mask[position.first_valid_line:last_line, position.first_valid_sample:last_sample] = identity_value
else:
mask[:, :] = identity_value
diff --git a/src/hyp3_isce2/insar_stripmap.py b/src/hyp3_isce2/insar_stripmap.py
index b0cc8412..b0d181a6 100644
--- a/src/hyp3_isce2/insar_stripmap.py
+++ b/src/hyp3_isce2/insar_stripmap.py
@@ -23,7 +23,7 @@
log = logging.getLogger(__name__)
-def insar_stripmap(user: str, password: str, reference_scene: str, secondary_scene: str) -> Path:
+def insar_stripmap(reference_scene: str, secondary_scene: str) -> Path:
"""Create a Stripmap interferogram
Args:
@@ -35,12 +35,22 @@ def insar_stripmap(user: str, password: str, reference_scene: str, secondary_sce
Returns:
Path to the output files
"""
- session = asf_search.ASFSession().auth_with_creds(user, password)
-
- reference_product, secondary_product = asf_search.search(
+ scenes = sorted([reference_scene, secondary_scene])
+ print(scenes)
+ reference_scene = scenes[0]
+ secondary_scene = scenes[1]
+ products = asf_search.search(
granule_list=[reference_scene, secondary_scene],
- processingLevel=asf_search.L1_0,
+ processingLevel="L1.0",
)
+
+ if products[0].properties['sceneName'] == reference_scene:
+ reference_product = products[0]
+ secondary_product = products[1]
+ else:
+ reference_product = products[1]
+ secondary_product = products[0]
+
assert reference_product.properties['sceneName'] == reference_scene
assert secondary_product.properties['sceneName'] == secondary_scene
products = (reference_product, secondary_product)
@@ -51,7 +61,7 @@ def insar_stripmap(user: str, password: str, reference_scene: str, secondary_sce
dem_path = download_dem_for_isce2(insar_roi, dem_name='glo_30', dem_dir=Path('dem'), buffer=0)
urls = [product.properties['url'] for product in products]
- asf_search.download_urls(urls=urls, path=os.getcwd(), session=session, processes=2)
+ asf_search.download_urls(urls=urls, path=os.getcwd(), processes=2)
zip_paths = [product.properties['fileName'] for product in products]
for zip_path in zip_paths:
@@ -93,7 +103,7 @@ def insar_stripmap(user: str, password: str, reference_scene: str, secondary_sce
def get_product_file(product: asf_search.ASFProduct, file_prefix: str) -> str:
paths = glob.glob(str(Path(product.properties['fileID']) / f'{file_prefix}*'))
- assert len(paths) == 1
+ assert len(paths) > 0
return paths[0]
@@ -104,10 +114,8 @@ def main():
parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)')
parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)')
- parser.add_argument('--username', type=str, required=True)
- parser.add_argument('--password', type=str, required=True)
- parser.add_argument('--reference-scene', type=str, required=True)
- parser.add_argument('--secondary-scene', type=str, required=True)
+ parser.add_argument('--reference', type=str, required=True)
+ parser.add_argument('--secondary', type=str, required=True)
args = parser.parse_args()
@@ -117,10 +125,8 @@ def main():
log.info('Begin InSAR Stripmap run')
product_dir = insar_stripmap(
- user=args.username,
- password=args.password,
- reference_scene=args.reference_scene,
- secondary_scene=args.secondary_scene,
+ reference_scene=args.reference,
+ secondary_scene=args.secondary,
)
log.info('InSAR Stripmap run completed successfully')
diff --git a/src/hyp3_isce2/insar_tops.py b/src/hyp3_isce2/insar_tops.py
index 4feb81dd..98219248 100644
--- a/src/hyp3_isce2/insar_tops.py
+++ b/src/hyp3_isce2/insar_tops.py
@@ -6,14 +6,16 @@
from pathlib import Path
from shutil import copyfile, make_archive
-from hyp3lib.aws import upload_file_to_s3
+from hyp3lib.util import string_is_true
+from isceobj.TopsProc.runMergeBursts import multilook
from s1_orbits import fetch_for_scene
-from hyp3_isce2 import slc
-from hyp3_isce2 import topsapp
+from hyp3_isce2 import packaging, slc, topsapp
from hyp3_isce2.dem import download_dem_for_isce2
from hyp3_isce2.logger import configure_root_logger
from hyp3_isce2.s1_auxcal import download_aux_cal
+from hyp3_isce2.utils import image_math, isce2_copy, make_browse_image, resample_to_radar_io
+from hyp3_isce2.water_mask import create_water_mask
log = logging.getLogger(__name__)
@@ -26,6 +28,7 @@ def insar_tops(
polarization: str = 'VV',
azimuth_looks: int = 4,
range_looks: int = 20,
+ apply_water_mask: bool = False,
) -> Path:
"""Create a full-SLC interferogram
@@ -36,6 +39,7 @@ def insar_tops(
polarization: Polarization to use
azimuth_looks: Number of azimuth looks
range_looks: Number of range looks
+ apply_water_mask: Apply water mask to unwrapped phase
Returns:
Path to the output files
@@ -46,6 +50,7 @@ def insar_tops(
ref_dir = slc.get_granule(reference_scene)
sec_dir = slc.get_granule(secondary_scene)
+
roi = slc.get_dem_bounds(ref_dir, sec_dir)
log.info(f'DEM ROI: {roi}')
@@ -58,7 +63,7 @@ def insar_tops(
orbit_file = fetch_for_scene(granule, dir=orbit_dir)
log.info(f'Got orbit file {orbit_file} from s1_orbits')
- config = topsapp.TopsappBurstConfig(
+ config = topsapp.TopsappConfig(
reference_safe=f'{reference_scene}.SAFE',
secondary_safe=f'{secondary_scene}.SAFE',
polarization=polarization,
@@ -73,49 +78,139 @@ def insar_tops(
)
config_path = config.write_template('topsApp.xml')
- topsapp.run_topsapp_burst(start='startup', end='unwrap2stage', config_xml=config_path)
+ if apply_water_mask:
+ topsapp.run_topsapp(start='startup', end='filter', config_xml=config_path)
+ water_mask_path = 'water_mask.wgs84'
+ create_water_mask(str(dem_path), water_mask_path)
+ multilook('merged/lon.rdr.full', outname='merged/lon.rdr', alks=azimuth_looks, rlks=range_looks)
+ multilook('merged/lat.rdr.full', outname='merged/lat.rdr', alks=azimuth_looks, rlks=range_looks)
+ resample_to_radar_io(water_mask_path, 'merged/lat.rdr', 'merged/lon.rdr', 'merged/water_mask.rdr')
+ isce2_copy('merged/phsig.cor', 'merged/unmasked.phsig.cor')
+ image_math('merged/unmasked.phsig.cor', 'merged/water_mask.rdr', 'merged/phsig.cor', 'a*b')
+ topsapp.run_topsapp(start='unwrap', end='unwrap2stage', config_xml=config_path)
+ isce2_copy('merged/unmasked.phsig.cor', 'merged/phsig.cor')
+ else:
+ topsapp.run_topsapp(start='startup', end='unwrap2stage', config_xml=config_path)
copyfile('merged/z.rdr.full.xml', 'merged/z.rdr.full.vrt.xml')
- topsapp.run_topsapp_burst(start='geocode', end='geocode', config_xml=config_path)
+ topsapp.run_topsapp(start='geocode', end='geocode', config_xml=config_path)
return Path('merged')
+def insar_tops_packaged(
+ reference: str,
+ secondary: str,
+ swaths: list = [1, 2, 3],
+ polarization: str = 'VV',
+ azimuth_looks: int = 4,
+ range_looks: int = 20,
+ apply_water_mask: bool = True,
+ bucket: str = None,
+ bucket_prefix: str = '',
+) -> Path:
+    """Create a full-SLC interferogram, package it as a HyP3 product, and optionally upload it
+
+ Args:
+ reference: Reference SLC name
+ secondary: Secondary SLC name
+ swaths: Swaths to process
+ polarization: Polarization to use
+ azimuth_looks: Number of azimuth looks
+ range_looks: Number of range looks
+ apply_water_mask: Apply water mask to unwrapped phase
+ bucket: AWS S3 bucket to upload the final product to
+        bucket_prefix: Bucket prefix to use when uploading the final product
+
+ Returns:
+ Path to the output files
+ """
+ pixel_size = packaging.get_pixel_size(f'{range_looks}x{azimuth_looks}')
+
+ log.info('Begin ISCE2 TopsApp run')
+
+ insar_tops(
+ reference_scene=reference,
+ secondary_scene=secondary,
+ swaths=swaths,
+ polarization=polarization,
+ azimuth_looks=azimuth_looks,
+ range_looks=range_looks,
+ apply_water_mask=apply_water_mask,
+ )
+
+ log.info('ISCE2 TopsApp run completed successfully')
+
+ product_name = packaging.get_product_name(
+ reference, secondary, pixel_spacing=int(pixel_size), polarization=polarization, slc=True
+ )
+
+ product_dir = Path(product_name)
+ product_dir.mkdir(parents=True, exist_ok=True)
+
+ packaging.translate_outputs(product_name, pixel_size=pixel_size)
+
+ unwrapped_phase = f'{product_name}/{product_name}_unw_phase.tif'
+ if apply_water_mask:
+ packaging.water_mask(unwrapped_phase, f'{product_name}/{product_name}_water_mask.tif')
+
+ make_browse_image(unwrapped_phase, f'{product_name}/{product_name}_unw_phase.png')
+ packaging.make_readme(
+ product_dir=product_dir,
+ product_name=product_name,
+ reference_scene=reference,
+ secondary_scene=secondary,
+ range_looks=range_looks,
+ azimuth_looks=azimuth_looks,
+ apply_water_mask=apply_water_mask,
+ )
+ packaging.make_parameter_file(
+ Path(f'{product_name}/{product_name}.txt'),
+ reference_scene=reference,
+ secondary_scene=secondary,
+ azimuth_looks=azimuth_looks,
+ range_looks=range_looks,
+ apply_water_mask=apply_water_mask,
+ )
+ output_zip = make_archive(base_name=product_name, format='zip', base_dir=product_name)
+ if bucket:
+ packaging.upload_product_to_s3(product_dir, output_zip, bucket, bucket_prefix)
+
+
def main():
"""HyP3 entrypoint for the SLC TOPS workflow"""
parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-
- parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)')
- parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)')
- parser.add_argument('--reference-scene', type=str, required=True)
- parser.add_argument('--secondary-scene', type=str, required=True)
- parser.add_argument('--polarization', type=str, choices=['VV', 'HH'], default='VV')
+ parser.add_argument('--reference', type=str, help='Reference granule')
+ parser.add_argument('--secondary', type=str, help='Secondary granule')
+ parser.add_argument('--polarization', type=str, default='VV', help='Polarization to use')
parser.add_argument(
- '--looks',
- choices=['20x4', '10x2', '5x1'],
- default='20x4',
- help='Number of looks to take in range and azimuth'
+ '--looks', choices=['20x4', '10x2', '5x1'], default='20x4', help='Number of looks to take in range and azimuth'
)
+ parser.add_argument(
+ '--apply-water-mask',
+ type=string_is_true,
+ default=False,
+ help='Apply a water body mask before unwrapping.',
+ )
+ parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)')
+ parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)')
args = parser.parse_args()
-
configure_root_logger()
log.debug(' '.join(sys.argv))
- log.info('Begin ISCE2 TopsApp run')
-
range_looks, azimuth_looks = [int(looks) for looks in args.looks.split('x')]
- isce_output_dir = insar_tops(
- reference_scene=args.reference_scene,
- secondary_scene=args.secondary_scene,
+ if args.polarization not in ['VV', 'VH', 'HV', 'HH']:
+ raise ValueError('Polarization must be one of VV, VH, HV, or HH')
+
+ insar_tops_packaged(
+ reference=args.reference,
+ secondary=args.secondary,
polarization=args.polarization,
azimuth_looks=azimuth_looks,
range_looks=range_looks,
+ apply_water_mask=args.apply_water_mask,
+ bucket=args.bucket,
+ bucket_prefix=args.bucket_prefix,
)
log.info('ISCE2 TopsApp run completed successfully')
-
- product_name = f'{args.reference_scene}x{args.secondary_scene}'
- output_zip = make_archive(base_name=product_name, format='zip', base_dir=isce_output_dir)
-
- if args.bucket:
- upload_file_to_s3(Path(output_zip), args.bucket, args.bucket_prefix)
diff --git a/src/hyp3_isce2/insar_tops_burst.py b/src/hyp3_isce2/insar_tops_burst.py
index f76b35dd..c9a52e74 100644
--- a/src/hyp3_isce2/insar_tops_burst.py
+++ b/src/hyp3_isce2/insar_tops_burst.py
@@ -1,51 +1,34 @@
-"""Create a single-burst Sentinel-1 geocoded unwrapped interferogram using ISCE2's TOPS processing workflow"""
+"""Create a Sentinel-1 geocoded unwrapped interferogram using ISCE2's TOPS processing workflow from a set of bursts"""
import argparse
import logging
-import os
-import subprocess
import sys
-from dataclasses import dataclass
-from datetime import datetime, timezone
+import warnings
from pathlib import Path
from shutil import copyfile, make_archive
from typing import Iterable, Optional
-import isce
-from hyp3lib.aws import upload_file_to_s3
-from hyp3lib.image import create_thumbnail
+import isce # noqa
+from burst2safe.burst2safe import burst2safe
from hyp3lib.util import string_is_true
from isceobj.TopsProc.runMergeBursts import multilook
-from lxml import etree
-from osgeo import gdal, gdalconst
-from pyproj import CRS
+from osgeo import gdal
from s1_orbits import fetch_for_scene
-import hyp3_isce2
-import hyp3_isce2.metadata.util
-from hyp3_isce2 import topsapp
+from hyp3_isce2 import packaging, topsapp
from hyp3_isce2.burst import (
- BurstPosition,
download_bursts,
get_burst_params,
get_isce2_burst_bbox,
- get_product_name,
get_region_of_interest,
multilook_radar_merge_inputs,
validate_bursts,
)
from hyp3_isce2.dem import download_dem_for_isce2
+from hyp3_isce2.insar_tops import insar_tops_packaged
from hyp3_isce2.logger import configure_root_logger
from hyp3_isce2.s1_auxcal import download_aux_cal
-from hyp3_isce2.utils import (
- ParameterFile,
- image_math,
- isce2_copy,
- make_browse_image,
- oldest_granule_first,
- resample_to_radar_io,
- utm_from_lon_lat,
-)
+from hyp3_isce2.utils import image_math, isce2_copy, make_browse_image, resample_to_radar_io
from hyp3_isce2.water_mask import create_water_mask
@@ -54,14 +37,6 @@
log = logging.getLogger(__name__)
-@dataclass
-class ISCE2Dataset:
- name: str
- suffix: str
- band: Iterable[int]
- dtype: Optional[int] = gdalconst.GDT_Float32
-
-
def insar_tops_burst(
reference_scene: str,
secondary_scene: str,
@@ -121,7 +96,7 @@ def insar_tops_burst(
orbit_file = fetch_for_scene(granule, dir=orbit_dir)
log.info(f'Got orbit file {orbit_file} from s1_orbits')
- config = topsapp.TopsappBurstConfig(
+ config = topsapp.TopsappConfig(
reference_safe=f'{ref_params.granule}.SAFE',
secondary_safe=f'{sec_params.granule}.SAFE',
polarization=ref_params.polarization,
@@ -136,10 +111,10 @@ def insar_tops_burst(
)
config_path = config.write_template('topsApp.xml')
- topsapp.run_topsapp_burst(start='startup', end='preprocess', config_xml=config_path)
+ topsapp.run_topsapp(start='startup', end='preprocess', config_xml=config_path)
topsapp.swap_burst_vrts()
if apply_water_mask:
- topsapp.run_topsapp_burst(start='computeBaselines', end='filter', config_xml=config_path)
+ topsapp.run_topsapp(start='computeBaselines', end='filter', config_xml=config_path)
water_mask_path = 'water_mask.wgs84'
create_water_mask(str(dem_path), water_mask_path)
multilook('merged/lon.rdr.full', outname='merged/lon.rdr', alks=azimuth_looks, rlks=range_looks)
@@ -147,381 +122,33 @@ def insar_tops_burst(
resample_to_radar_io(water_mask_path, 'merged/lat.rdr', 'merged/lon.rdr', 'merged/water_mask.rdr')
isce2_copy('merged/phsig.cor', 'merged/unmasked.phsig.cor')
image_math('merged/unmasked.phsig.cor', 'merged/water_mask.rdr', 'merged/phsig.cor', 'a*b')
- topsapp.run_topsapp_burst(start='unwrap', end='unwrap2stage', config_xml=config_path)
+ topsapp.run_topsapp(start='unwrap', end='unwrap2stage', config_xml=config_path)
isce2_copy('merged/unmasked.phsig.cor', 'merged/phsig.cor')
else:
- topsapp.run_topsapp_burst(start='computeBaselines', end='unwrap2stage', config_xml=config_path)
+ topsapp.run_topsapp(start='computeBaselines', end='unwrap2stage', config_xml=config_path)
copyfile('merged/z.rdr.full.xml', 'merged/z.rdr.full.vrt.xml')
- topsapp.run_topsapp_burst(start='geocode', end='geocode', config_xml=config_path)
+ topsapp.run_topsapp(start='geocode', end='geocode', config_xml=config_path)
return Path('merged')
-def make_readme(
- product_dir: Path,
- product_name: str,
- reference_scene: str,
- secondary_scene: str,
- range_looks: int,
- azimuth_looks: int,
- apply_water_mask: bool,
-) -> None:
- wrapped_phase_path = product_dir / f'{product_name}_wrapped_phase.tif'
- info = gdal.Info(str(wrapped_phase_path), format='json')
- secondary_granule_datetime_str = secondary_scene.split('_')[3]
-
- payload = {
- 'processing_date': datetime.now(timezone.utc),
- 'plugin_name': hyp3_isce2.__name__,
- 'plugin_version': hyp3_isce2.__version__,
- 'processor_name': isce.__name__.upper(),
- 'processor_version': isce.__version__,
- 'projection': hyp3_isce2.metadata.util.get_projection(info['coordinateSystem']['wkt']),
- 'pixel_spacing': info['geoTransform'][1],
- 'product_name': product_name,
- 'reference_burst_name': reference_scene,
- 'secondary_burst_name': secondary_scene,
- 'range_looks': range_looks,
- 'azimuth_looks': azimuth_looks,
- 'secondary_granule_date': datetime.strptime(secondary_granule_datetime_str, '%Y%m%dT%H%M%S'),
- 'dem_name': 'GLO-30',
- 'dem_pixel_spacing': '30 m',
- 'apply_water_mask': apply_water_mask,
- }
- content = hyp3_isce2.metadata.util.render_template('insar_burst/insar_burst_readme.md.txt.j2', payload)
-
- output_file = product_dir / f'{product_name}_README.md.txt'
- with open(output_file, 'w') as f:
- f.write(content)
-
-
-def make_parameter_file(
- out_path: Path,
- reference_scene: str,
- secondary_scene: str,
- swath_number: int,
- azimuth_looks: int,
- range_looks: int,
- multilook_position: BurstPosition,
- apply_water_mask: bool,
- dem_name: str = 'GLO_30',
- dem_resolution: int = 30,
-) -> None:
- """Create a parameter file for the output product
-
- Args:
- out_path: path to output the parameter file
- reference_scene: Reference burst name
- secondary_scene: Secondary burst name
- swath_number: Number of swath to grab bursts from (1, 2, or 3) for IW
- azimuth_looks: Number of azimuth looks
- range_looks: Number of range looks
- dem_name: Name of the DEM that is use
- dem_resolution: Resolution of the DEM
-
- returns:
- None
- """
- SPEED_OF_LIGHT = 299792458.0
- SPACECRAFT_HEIGHT = 693000.0
- EARTH_RADIUS = 6337286.638938101
-
- parser = etree.XMLParser(encoding='utf-8', recover=True)
-
- ref_tag = reference_scene[-10:-6]
- sec_tag = secondary_scene[-10:-6]
- reference_safe = [file for file in os.listdir('.') if file.endswith(f'{ref_tag}.SAFE')][0]
- secondary_safe = [file for file in os.listdir('.') if file.endswith(f'{sec_tag}.SAFE')][0]
-
- ref_annotation_path = f'{reference_safe}/annotation/'
- ref_annotation = [file for file in os.listdir(ref_annotation_path) if os.path.isfile(ref_annotation_path + file)][0]
-
- ref_manifest_xml = etree.parse(f'{reference_safe}/manifest.safe', parser)
- sec_manifest_xml = etree.parse(f'{secondary_safe}/manifest.safe', parser)
- ref_annotation_xml = etree.parse(f'{ref_annotation_path}{ref_annotation}', parser)
- topsProc_xml = etree.parse('topsProc.xml', parser)
- topsApp_xml = etree.parse('topsApp.xml', parser)
-
- safe = '{http://www.esa.int/safe/sentinel-1.0}'
- s1 = '{http://www.esa.int/safe/sentinel-1.0/sentinel-1}'
- metadata_path = './/metadataObject[@ID="measurementOrbitReference"]//xmlData//'
- orbit_number_query = metadata_path + safe + 'orbitNumber'
- orbit_direction_query = metadata_path + safe + 'extension//' + s1 + 'pass'
-
- ref_orbit_number = ref_manifest_xml.find(orbit_number_query).text
- ref_orbit_direction = ref_manifest_xml.find(orbit_direction_query).text
- sec_orbit_number = sec_manifest_xml.find(orbit_number_query).text
- sec_orbit_direction = sec_manifest_xml.find(orbit_direction_query).text
- ref_heading = float(ref_annotation_xml.find('.//platformHeading').text)
- ref_time = ref_annotation_xml.find('.//productFirstLineUtcTime').text
- slant_range_time = float(ref_annotation_xml.find('.//slantRangeTime').text)
- range_sampling_rate = float(ref_annotation_xml.find('.//rangeSamplingRate').text)
- number_samples = int(ref_annotation_xml.find('.//swathTiming/samplesPerBurst').text)
- baseline_perp = topsProc_xml.find(f'.//IW-{swath_number}_Bperp_at_midrange_for_first_common_burst').text
- unwrapper_type = topsApp_xml.find('.//property[@name="unwrapper name"]').text
- phase_filter_strength = topsApp_xml.find('.//property[@name="filter strength"]').text
-
- slant_range_near = float(slant_range_time) * SPEED_OF_LIGHT / 2
- range_pixel_spacing = SPEED_OF_LIGHT / (2 * range_sampling_rate)
- slant_range_far = slant_range_near + (number_samples - 1) * range_pixel_spacing
- slant_range_center = (slant_range_near + slant_range_far) / 2
-
- s = ref_time.split('T')[1].split(':')
- utc_time = ((int(s[0]) * 60 + int(s[1])) * 60) + float(s[2])
-
- parameter_file = ParameterFile(
- reference_granule=reference_scene,
- secondary_granule=secondary_scene,
- reference_orbit_direction=ref_orbit_direction,
- reference_orbit_number=ref_orbit_number,
- secondary_orbit_direction=sec_orbit_direction,
- secondary_orbit_number=sec_orbit_number,
- baseline=float(baseline_perp),
- utc_time=utc_time,
- heading=ref_heading,
- spacecraft_height=SPACECRAFT_HEIGHT,
- earth_radius_at_nadir=EARTH_RADIUS,
- slant_range_near=slant_range_near,
- slant_range_center=slant_range_center,
- slant_range_far=slant_range_far,
- range_looks=int(range_looks),
- azimuth_looks=int(azimuth_looks),
- insar_phase_filter=True,
- phase_filter_parameter=float(phase_filter_strength),
- range_bandpass_filter=False,
- azimuth_bandpass_filter=False,
- dem_source=dem_name,
- dem_resolution=dem_resolution,
- unwrapping_type=unwrapper_type,
- speckle_filter=True,
- water_mask=apply_water_mask,
- radar_n_lines=multilook_position.n_lines,
- radar_n_samples=multilook_position.n_samples,
- radar_first_valid_line=multilook_position.first_valid_line,
- radar_n_valid_lines=multilook_position.n_valid_lines,
- radar_first_valid_sample=multilook_position.first_valid_sample,
- radar_n_valid_samples=multilook_position.n_valid_samples,
- multilook_azimuth_time_interval=multilook_position.azimuth_time_interval,
- multilook_range_pixel_size=multilook_position.range_pixel_size,
- radar_sensing_stop=multilook_position.sensing_stop,
- )
- parameter_file.write(out_path)
-
-
-def find_product(pattern: str) -> str:
- """Find a single file within the working directory's structure
-
- Args:
- pattern: Glob pattern for file
-
- Returns
- Path to file
- """
- search = Path.cwd().glob(pattern)
- product = str(list(search)[0])
- return product
-
-
-def translate_outputs(product_name: str, pixel_size: float, include_radar: bool = False, use_multilooked=False) -> None:
- """Translate ISCE outputs to a standard GTiff format with a UTM projection.
- Assume you are in the top level of an ISCE run directory
-
- Args:
- product_name: Name of the product
- pixel_size: Pixel size
- include_radar: Flag to include the full resolution radar geometry products in the output
- """
-
- src_ds = gdal.Open('merged/filt_topophase.unw.geo')
- src_geotransform = src_ds.GetGeoTransform()
- src_projection = src_ds.GetProjection()
-
- target_ds = gdal.Open('merged/dem.crop', gdal.GA_Update)
- target_ds.SetGeoTransform(src_geotransform)
- target_ds.SetProjection(src_projection)
-
- del src_ds, target_ds
-
- datasets = [
- ISCE2Dataset('merged/filt_topophase.unw.geo', 'unw_phase', [2]),
- ISCE2Dataset('merged/phsig.cor.geo', 'corr', [1]),
- ISCE2Dataset('merged/dem.crop', 'dem', [1]),
- ISCE2Dataset('merged/filt_topophase.unw.conncomp.geo', 'conncomp', [1]),
- ]
-
- suffix = '01'
- if use_multilooked:
- suffix += '.multilooked'
-
- rdr_datasets = [
- ISCE2Dataset(
- find_product(f'fine_interferogram/IW*/burst_{suffix}.int.vrt'),
- 'wrapped_phase_rdr',
- [1],
- gdalconst.GDT_CFloat32,
- ),
- ISCE2Dataset(find_product(f'geom_reference/IW*/lat_{suffix}.rdr.vrt'), 'lat_rdr', [1]),
- ISCE2Dataset(find_product(f'geom_reference/IW*/lon_{suffix}.rdr.vrt'), 'lon_rdr', [1]),
- ISCE2Dataset(find_product(f'geom_reference/IW*/los_{suffix}.rdr.vrt'), 'los_rdr', [1, 2]),
- ]
- if include_radar:
- datasets += rdr_datasets
-
- for dataset in datasets:
- out_file = str(Path(product_name) / f'{product_name}_{dataset.suffix}.tif')
- gdal.Translate(
- destName=out_file,
- srcDS=dataset.name,
- bandList=dataset.band,
- format='GTiff',
- outputType=dataset.dtype,
- noData=0,
- creationOptions=['TILED=YES', 'COMPRESS=LZW', 'NUM_THREADS=ALL_CPUS'],
- )
-
- # Use numpy.angle to extract the phase component of the complex wrapped interferogram
- wrapped_phase = ISCE2Dataset('filt_topophase.flat.geo', 'wrapped_phase', 1)
- cmd = (
- 'gdal_calc.py '
- f'--outfile {product_name}/{product_name}_{wrapped_phase.suffix}.tif '
- f'-A merged/{wrapped_phase.name} --A_band={wrapped_phase.band} '
- '--calc angle(A) --type Float32 --format GTiff --NoDataValue=0 '
- '--creation-option TILED=YES --creation-option COMPRESS=LZW --creation-option NUM_THREADS=ALL_CPUS'
- )
- subprocess.run(cmd.split(' '), check=True)
-
- ds = gdal.Open('merged/los.rdr.geo', gdal.GA_Update)
- ds.GetRasterBand(1).SetNoDataValue(0)
- ds.GetRasterBand(2).SetNoDataValue(0)
- del ds
-
- # Performs the inverse of the operation performed by MintPy:
- # https://github.com/insarlab/MintPy/blob/df96e0b73f13cc7e2b6bfa57d380963f140e3159/src/mintpy/objects/stackDict.py#L732-L737
- # First subtract the incidence angle from ninety degrees to go from sensor-to-ground to ground-to-sensor,
- # then convert to radians
- incidence_angle = ISCE2Dataset('los.rdr.geo', 'lv_theta', 1)
- cmd = (
- 'gdal_calc.py '
- f'--outfile {product_name}/{product_name}_{incidence_angle.suffix}.tif '
- f'-A merged/{incidence_angle.name} --A_band={incidence_angle.band} '
- '--calc (90-A)*pi/180 --type Float32 --format GTiff --NoDataValue=0 '
- '--creation-option TILED=YES --creation-option COMPRESS=LZW --creation-option NUM_THREADS=ALL_CPUS'
- )
- subprocess.run(cmd.split(' '), check=True)
-
- # Performs the inverse of the operation performed by MintPy:
- # https://github.com/insarlab/MintPy/blob/df96e0b73f13cc7e2b6bfa57d380963f140e3159/src/mintpy/objects/stackDict.py#L739-L745
- # First add ninety degrees to the azimuth angle to go from angle-from-east to angle-from-north,
- # then convert to radians
- azimuth_angle = ISCE2Dataset('los.rdr.geo', 'lv_phi', 2)
- cmd = (
- 'gdal_calc.py '
- f'--outfile {product_name}/{product_name}_{azimuth_angle.suffix}.tif '
- f'-A merged/{azimuth_angle.name} --A_band={azimuth_angle.band} '
- '--calc (90+A)*pi/180 --type Float32 --format GTiff --NoDataValue=0 '
- '--creation-option TILED=YES --creation-option COMPRESS=LZW --creation-option NUM_THREADS=ALL_CPUS'
- )
- subprocess.run(cmd.split(' '), check=True)
-
- ds = gdal.Open('merged/filt_topophase.unw.geo')
- geotransform = ds.GetGeoTransform()
- del ds
-
- epsg = utm_from_lon_lat(geotransform[0], geotransform[3])
- files = [str(path) for path in Path(product_name).glob('*.tif') if not path.name.endswith('rdr.tif')]
- for file in files:
- gdal.Warp(
- file,
- file,
- dstSRS=f'epsg:{epsg}',
- creationOptions=['TILED=YES', 'COMPRESS=LZW', 'NUM_THREADS=ALL_CPUS'],
- xRes=pixel_size,
- yRes=pixel_size,
- targetAlignedPixels=True,
- )
-
-
-def get_pixel_size(looks: str) -> float:
- return {'20x4': 80.0, '10x2': 40.0, '5x1': 20.0}[looks]
-
-
-def convert_raster_from_isce2_gdal(input_image, ref_image, output_image):
- """Convert the water mask in WGS84 to be the same projection and extent of the output product.
-
- Args:
- input_image: dem file name
- ref_image: output geotiff file name
- output_image: water mask file name
- """
-
- ref_ds = gdal.Open(ref_image)
-
- gt = ref_ds.GetGeoTransform()
-
- pixel_size = gt[1]
-
- minx = gt[0]
- maxx = gt[0] + gt[1] * ref_ds.RasterXSize
- maxy = gt[3]
- miny = gt[3] + gt[5] * ref_ds.RasterYSize
-
- crs = ref_ds.GetSpatialRef()
- epsg = CRS.from_wkt(crs.ExportToWkt()).to_epsg()
-
- del ref_ds
-
- gdal.Warp(
- output_image,
- input_image,
- dstSRS=f'epsg:{epsg}',
- creationOptions=['TILED=YES', 'COMPRESS=LZW', 'NUM_THREADS=ALL_CPUS'],
- outputBounds=[minx, miny, maxx, maxy],
- xRes=pixel_size,
- yRes=pixel_size,
- targetAlignedPixels=True,
- )
-
-
-def main():
- """HyP3 entrypoint for the burst TOPS workflow"""
- parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-
- parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)')
- parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)')
- parser.add_argument(
- '--looks', choices=['20x4', '10x2', '5x1'], default='20x4', help='Number of looks to take in range and azimuth'
- )
- parser.add_argument(
- '--apply-water-mask',
- type=string_is_true,
- default=False,
- help='Apply a water body mask before unwrapping.',
- )
- # Allows granules to be given as a space-delimited list of strings (e.g. foo bar) or as a single
- # quoted string that contains spaces (e.g. "foo bar"). AWS Batch uses the latter format when
- # invoking the container command.
- parser.add_argument('granules', type=str.split, nargs='+')
-
- args = parser.parse_args()
-
- args.granules = [item for sublist in args.granules for item in sublist]
- if len(args.granules) != 2:
- parser.error('Must provide exactly two granules')
-
- configure_root_logger()
- log.debug(' '.join(sys.argv))
+def insar_tops_single_burst(
+ reference: str,
+ secondary: str,
+ looks: str = '20x4',
+ apply_water_mask=False,
+ bucket: Optional[str] = None,
+ bucket_prefix: str = '',
+):
+ validate_bursts(reference, secondary)
+ swath_number = int(reference[12])
+ range_looks, azimuth_looks = [int(value) for value in looks.split('x')]
log.info('Begin ISCE2 TopsApp run')
- reference_scene, secondary_scene = oldest_granule_first(args.granules[0], args.granules[1])
- validate_bursts(reference_scene, secondary_scene)
- swath_number = int(reference_scene[12])
- range_looks, azimuth_looks = [int(looks) for looks in args.looks.split('x')]
- apply_water_mask = args.apply_water_mask
-
insar_tops_burst(
- reference_scene=reference_scene,
- secondary_scene=secondary_scene,
+ reference_scene=reference,
+ secondary_scene=secondary,
azimuth_looks=azimuth_looks,
range_looks=range_looks,
swath_number=swath_number,
@@ -532,58 +159,138 @@ def main():
multilook_position = multilook_radar_merge_inputs(swath_number, rg_looks=range_looks, az_looks=azimuth_looks)
- pixel_size = get_pixel_size(args.looks)
- product_name = get_product_name(reference_scene, secondary_scene, pixel_spacing=int(pixel_size))
+ pixel_size = packaging.get_pixel_size(looks)
+ product_name = packaging.get_product_name(reference, secondary, pixel_spacing=int(pixel_size), slc=False)
product_dir = Path(product_name)
product_dir.mkdir(parents=True, exist_ok=True)
- translate_outputs(product_name, pixel_size=pixel_size, include_radar=True, use_multilooked=True)
+ packaging.translate_outputs(product_name, pixel_size=pixel_size, include_radar=True, use_multilooked=True)
unwrapped_phase = f'{product_name}/{product_name}_unw_phase.tif'
- water_mask = f'{product_name}/{product_name}_water_mask.tif'
-
if apply_water_mask:
- convert_raster_from_isce2_gdal('water_mask.wgs84', unwrapped_phase, water_mask)
- cmd = (
- 'gdal_calc.py '
- f'--outfile {unwrapped_phase} '
- f'-A {unwrapped_phase} -B {water_mask} '
- '--calc A*B '
- '--overwrite '
- '--NoDataValue 0 '
- '--creation-option TILED=YES --creation-option COMPRESS=LZW --creation-option NUM_THREADS=ALL_CPUS'
- )
- subprocess.run(cmd.split(' '), check=True)
+ packaging.water_mask(unwrapped_phase, f'{product_name}/{product_name}_water_mask.tif')
make_browse_image(unwrapped_phase, f'{product_name}/{product_name}_unw_phase.png')
- make_readme(
+ packaging.make_readme(
product_dir=product_dir,
product_name=product_name,
- reference_scene=reference_scene,
- secondary_scene=secondary_scene,
+ reference_scene=reference,
+ secondary_scene=secondary,
range_looks=range_looks,
azimuth_looks=azimuth_looks,
apply_water_mask=apply_water_mask,
)
- make_parameter_file(
+ packaging.make_parameter_file(
Path(f'{product_name}/{product_name}.txt'),
- reference_scene=reference_scene,
- secondary_scene=secondary_scene,
+ reference_scene=reference,
+ secondary_scene=secondary,
azimuth_looks=azimuth_looks,
range_looks=range_looks,
- swath_number=swath_number,
multilook_position=multilook_position,
apply_water_mask=apply_water_mask,
)
output_zip = make_archive(base_name=product_name, format='zip', base_dir=product_name)
- if args.bucket:
- for browse in product_dir.glob('*.png'):
- create_thumbnail(browse, output_dir=product_dir)
+ if bucket:
+ packaging.upload_product_to_s3(product_dir, output_zip, bucket, bucket_prefix)
+
+
+def insar_tops_multi_burst(
+ reference: Iterable[str],
+ secondary: Iterable[str],
+ swaths: list = [1, 2, 3],
+ looks: str = '20x4',
+ apply_water_mask=False,
+ bucket: Optional[str] = None,
+ bucket_prefix: str = '',
+):
+ validate_bursts(reference, secondary)
+ reference_safe_path = burst2safe(reference)
+ reference_safe = reference_safe_path.name.split('.')[0]
+ secondary_safe_path = burst2safe(secondary)
+ secondary_safe = secondary_safe_path.name.split('.')[0]
+
+ range_looks, azimuth_looks = [int(value) for value in looks.split('x')]
+ swaths = list(set(int(granule.split('_')[2][2]) for granule in reference))
+ polarization = reference[0].split('_')[4]
+
+ log.info('Begin ISCE2 TopsApp run')
+ insar_tops_packaged(
+ reference=reference_safe,
+ secondary=secondary_safe,
+ swaths=swaths,
+ polarization=polarization,
+ azimuth_looks=azimuth_looks,
+ range_looks=range_looks,
+ apply_water_mask=apply_water_mask,
+ bucket=bucket,
+ bucket_prefix=bucket_prefix,
+ )
+ log.info('ISCE2 TopsApp run completed successfully')
+
+
+def oldest_granule_first(g1, g2):
+ if g1[14:29] <= g2[14:29]:
+ return [g1], [g2]
+ return [g2], [g1]
+
+
+def main():
+ """HyP3 entrypoint for the burst TOPS workflow"""
+ parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser.add_argument('granules', type=str.split, nargs='*', help='Reference and secondary scene names')
+ parser.add_argument('--reference', type=str.split, nargs='+', help='List of reference scenes')
+ parser.add_argument('--secondary', type=str.split, nargs='+', help='List of secondary scenes')
+ parser.add_argument(
+ '--looks', choices=['20x4', '10x2', '5x1'], default='20x4', help='Number of looks to take in range and azimuth'
+ )
+ parser.add_argument(
+ '--apply-water-mask', type=string_is_true, default=False, help='Apply a water body mask before unwrapping.'
+ )
+ parser.add_argument('--bucket', help='AWS S3 bucket for HyP3 to upload the final product(s) to')
+ parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)')
+
+ args = parser.parse_args()
- upload_file_to_s3(Path(output_zip), args.bucket, args.bucket_prefix)
+ has_granules = args.granules is not None and len(args.granules) > 0
+ has_ref_sec = args.reference is not None and args.secondary is not None
+ if has_granules and has_ref_sec:
+ parser.error('Provide either --reference and --secondary, or the positional granules argument, not both.')
+ elif not has_granules and not has_ref_sec:
+ parser.error('Either --reference and --secondary, or the positional granules argument, must be provided.')
+ elif has_granules:
+ warnings.warn(
+ 'The positional argument for granules is deprecated. Please use --reference and --secondary.',
+ DeprecationWarning,
+ )
+ granules = [item for sublist in args.granules for item in sublist]
+ if len(granules) != 2:
+ parser.error('Exactly two granules must be provided.')
+ reference, secondary = oldest_granule_first(granules[0], granules[1])
+ else:
+ reference = [item for sublist in args.reference for item in sublist]
+ secondary = [item for sublist in args.secondary for item in sublist]
- for product_file in product_dir.iterdir():
- upload_file_to_s3(product_file, args.bucket, args.bucket_prefix)
+ configure_root_logger()
+ log.debug(' '.join(sys.argv))
+
+ if len(reference) == 1:
+ insar_tops_single_burst(
+ reference=reference[0],
+ secondary=secondary[0],
+ looks=args.looks,
+ apply_water_mask=args.apply_water_mask,
+ bucket=args.bucket,
+ bucket_prefix=args.bucket_prefix,
+ )
+ else:
+ insar_tops_multi_burst(
+ reference=reference,
+ secondary=secondary,
+ looks=args.looks,
+ apply_water_mask=args.apply_water_mask,
+ bucket=args.bucket,
+ bucket_prefix=args.bucket_prefix,
+ )
diff --git a/src/hyp3_isce2/merge_tops_bursts.py b/src/hyp3_isce2/merge_tops_bursts.py
index f3ef8457..4ccc327a 100644
--- a/src/hyp3_isce2/merge_tops_bursts.py
+++ b/src/hyp3_isce2/merge_tops_bursts.py
@@ -39,9 +39,11 @@
import hyp3_isce2
import hyp3_isce2.burst as burst_utils
from hyp3_isce2.dem import download_dem_for_isce2
+from hyp3_isce2.packaging import get_pixel_size, translate_outputs
from hyp3_isce2.utils import (
ParameterFile,
create_image,
+ get_projection,
image_math,
load_product,
make_browse_image,
@@ -55,8 +57,6 @@
logging.basicConfig(stream=sys.stdout, format=log_format, level=logging.INFO, force=True)
log = logging.getLogger(__name__)
-from hyp3_isce2.insar_tops_burst import get_pixel_size, translate_outputs # noqa
-
BURST_IFG_DIR = 'fine_interferogram'
BURST_GEOM_DIR = 'geom_reference'
@@ -1027,7 +1027,7 @@ def make_readme(
'plugin_version': hyp3_isce2.__version__,
'processor_name': isce.__name__.upper(), # noqa
'processor_version': isce.__version__, # noqa
- 'projection': hyp3_isce2.metadata.util.get_projection(info['coordinateSystem']['wkt']),
+ 'projection': get_projection(info['coordinateSystem']['wkt']),
'pixel_spacing': info['geoTransform'][1],
'product_name': product_name,
'reference_burst_name': ', '.join(reference_scenes),
diff --git a/src/hyp3_isce2/metadata/util.py b/src/hyp3_isce2/metadata/util.py
index d8ada9a7..1f238b0c 100644
--- a/src/hyp3_isce2/metadata/util.py
+++ b/src/hyp3_isce2/metadata/util.py
@@ -1,5 +1,4 @@
from jinja2 import Environment, PackageLoader, StrictUndefined, select_autoescape
-from osgeo import osr
def get_environment() -> Environment:
@@ -19,9 +18,3 @@ def render_template(template: str, payload: dict) -> str:
template = env.get_template(template)
rendered = template.render(payload)
return rendered
-
-
-def get_projection(srs_wkt) -> str:
- srs = osr.SpatialReference()
- srs.ImportFromWkt(srs_wkt)
- return srs.GetAttrValue('projcs')
diff --git a/src/hyp3_isce2/packaging.py b/src/hyp3_isce2/packaging.py
new file mode 100644
index 00000000..09da4ec7
--- /dev/null
+++ b/src/hyp3_isce2/packaging.py
@@ -0,0 +1,476 @@
+import os
+import subprocess
+from dataclasses import dataclass
+from datetime import datetime, timezone
+from pathlib import Path
+from secrets import token_hex
+from typing import Iterable, Optional
+
+import isce
+import numpy as np
+from hyp3lib.aws import upload_file_to_s3
+from hyp3lib.image import create_thumbnail
+from lxml import etree
+from osgeo import gdal, gdalconst
+from pyproj import CRS
+
+import hyp3_isce2
+import hyp3_isce2.metadata.util
+from hyp3_isce2.burst import BurstPosition
+from hyp3_isce2.slc import get_geometry_from_manifest
+from hyp3_isce2.utils import ParameterFile, get_projection, utm_from_lon_lat
+
+
+@dataclass
+class ISCE2Dataset:
+ name: str
+ suffix: str
+ band: Iterable[int]
+ dtype: Optional[int] = gdalconst.GDT_Float32
+
+
+def get_pixel_size(looks: str) -> float:
+ return {'20x4': 80.0, '10x2': 40.0, '5x1': 20.0}[looks]
+
+
+def find_product(pattern: str) -> str:
+ """Find a single file within the working directory's structure
+
+ Args:
+ pattern: Glob pattern for file
+
+ Returns:
+ Path to file
+ """
+ search = Path.cwd().glob(pattern)
+ product = str(list(search)[0])
+ return product
+
+
+def get_product_name(
+ reference: str, secondary: str, pixel_spacing: int, polarization: Optional[str] = None, slc: bool = True
+) -> str:
+ """Get the name of the interferogram product.
+
+ Args:
+ reference: The reference burst name.
+ secondary: The secondary burst name.
+ pixel_spacing: The spacing of the pixels in the output image.
+ polarization: The polarization of the input data. Only required for SLCs.
+ slc: Whether the input scenes are SLCs or bursts.
+
+ Returns:
+ The name of the interferogram product.
+ """
+ reference_split = reference.split('_')
+ secondary_split = secondary.split('_')
+
+ if slc:
+ parser = etree.XMLParser(encoding='utf-8', recover=True)
+ safe = '{http://www.esa.int/safe/sentinel-1.0}'
+ platform = reference_split[0]
+ reference_date = reference_split[5][0:8]
+ secondary_date = secondary_split[5][0:8]
+ if not polarization:
+ raise ValueError('Polarization is required for SLCs')
+ elif polarization not in ['VV', 'VH', 'HV', 'HH']:
+ raise ValueError('Polarization must be one of VV, VH, HV, or HH')
+ ref_manifest_xml = etree.parse(f'{reference}.SAFE/manifest.safe', parser)
+ metadata_path = './/metadataObject[@ID="measurementOrbitReference"]//xmlData//'
+ relative_orbit_number_query = metadata_path + safe + 'relativeOrbitNumber'
+ orbit_number = ref_manifest_xml.find(relative_orbit_number_query).text.zfill(3)
+ footprint = get_geometry_from_manifest(Path(f'{reference}.SAFE/manifest.safe'))
+ lons, lats = footprint.exterior.coords.xy
+
+ def lat_string(lat):
+ return ('N' if lat >= 0 else 'S') + f"{('%.1f' % np.abs(lat)).zfill(4)}".replace('.', '_')
+
+ def lon_string(lon):
+ return ('E' if lon >= 0 else 'W') + f"{('%.1f' % np.abs(lon)).zfill(5)}".replace('.', '_')
+
+ lat_lims = [lat_string(lat) for lat in [np.min(lats), np.max(lats)]]
+ lon_lims = [lon_string(lon) for lon in [np.min(lons), np.max(lons)]]
+ name_parts = [platform, orbit_number, lon_lims[0], lat_lims[0], lon_lims[1], lat_lims[1]]
+ else:
+ platform = reference_split[0]
+ burst_id = reference_split[1]
+ image_plus_swath = reference_split[2]
+ reference_date = reference_split[3][0:8]
+ secondary_date = secondary_split[3][0:8]
+ polarization = reference_split[4]
+ name_parts = [platform, burst_id, image_plus_swath]
+ product_type = 'INT'
+ pixel_spacing = str(int(pixel_spacing))
+ product_id = token_hex(2).upper()
+ product_name = '_'.join(
+ name_parts
+ + [
+ reference_date,
+ secondary_date,
+ polarization,
+ product_type + pixel_spacing,
+ product_id,
+ ]
+ )
+
+ return product_name
+
+
+def translate_outputs(product_name: str, pixel_size: float, include_radar: bool = False, use_multilooked=False) -> None:
+ """Translate ISCE outputs to a standard GTiff format with a UTM projection.
+ Assume you are in the top level of an ISCE run directory
+
+ Args:
+ product_name: Name of the product
+ pixel_size: Pixel size
+ include_radar: Flag to include the full resolution radar geometry products in the output
+ use_multilooked: Flag to use multilooked versions of the radar geometry products
+ """
+
+ src_ds = gdal.Open('merged/filt_topophase.unw.geo')
+ src_geotransform = src_ds.GetGeoTransform()
+ src_projection = src_ds.GetProjection()
+
+ target_ds = gdal.Open('merged/dem.crop', gdal.GA_Update)
+ target_ds.SetGeoTransform(src_geotransform)
+ target_ds.SetProjection(src_projection)
+
+ del src_ds, target_ds
+
+ datasets = [
+ ISCE2Dataset('merged/filt_topophase.unw.geo', 'unw_phase', [2]),
+ ISCE2Dataset('merged/phsig.cor.geo', 'corr', [1]),
+ ISCE2Dataset('merged/dem.crop', 'dem', [1]),
+ ISCE2Dataset('merged/filt_topophase.unw.conncomp.geo', 'conncomp', [1]),
+ ]
+
+ suffix = '01'
+ if use_multilooked:
+ suffix += '.multilooked'
+
+ rdr_datasets = [
+ ISCE2Dataset(
+ find_product(f'fine_interferogram/IW*/burst_{suffix}.int.vrt'),
+ 'wrapped_phase_rdr',
+ [1],
+ gdalconst.GDT_CFloat32,
+ ),
+ ISCE2Dataset(find_product(f'geom_reference/IW*/lat_{suffix}.rdr.vrt'), 'lat_rdr', [1]),
+ ISCE2Dataset(find_product(f'geom_reference/IW*/lon_{suffix}.rdr.vrt'), 'lon_rdr', [1]),
+ ISCE2Dataset(find_product(f'geom_reference/IW*/los_{suffix}.rdr.vrt'), 'los_rdr', [1, 2]),
+ ]
+ if include_radar:
+ datasets += rdr_datasets
+
+ for dataset in datasets:
+ out_file = str(Path(product_name) / f'{product_name}_{dataset.suffix}.tif')
+ gdal.Translate(
+ destName=out_file,
+ srcDS=dataset.name,
+ bandList=dataset.band,
+ format='GTiff',
+ outputType=dataset.dtype,
+ noData=0,
+ creationOptions=['TILED=YES', 'COMPRESS=LZW', 'NUM_THREADS=ALL_CPUS'],
+ )
+
+ # Use numpy.angle to extract the phase component of the complex wrapped interferogram
+ wrapped_phase = ISCE2Dataset('filt_topophase.flat.geo', 'wrapped_phase', 1)
+ cmd = (
+ 'gdal_calc.py '
+ f'--outfile {product_name}/{product_name}_{wrapped_phase.suffix}.tif '
+ f'-A merged/{wrapped_phase.name} --A_band={wrapped_phase.band} '
+ '--calc angle(A) --type Float32 --format GTiff --NoDataValue=0 '
+ '--creation-option TILED=YES --creation-option COMPRESS=LZW --creation-option NUM_THREADS=ALL_CPUS'
+ )
+ subprocess.run(cmd.split(' '), check=True)
+
+ ds = gdal.Open('merged/los.rdr.geo', gdal.GA_Update)
+ ds.GetRasterBand(1).SetNoDataValue(0)
+ ds.GetRasterBand(2).SetNoDataValue(0)
+ del ds
+
+ # Performs the inverse of the operation performed by MintPy:
+ # https://github.com/insarlab/MintPy/blob/df96e0b73f13cc7e2b6bfa57d380963f140e3159/src/mintpy/objects/stackDict.py#L732-L737
+ # First subtract the incidence angle from ninety degrees to go from sensor-to-ground to ground-to-sensor,
+ # then convert to radians
+ incidence_angle = ISCE2Dataset('los.rdr.geo', 'lv_theta', 1)
+ cmd = (
+ 'gdal_calc.py '
+ f'--outfile {product_name}/{product_name}_{incidence_angle.suffix}.tif '
+ f'-A merged/{incidence_angle.name} --A_band={incidence_angle.band} '
+ '--calc (90-A)*pi/180 --type Float32 --format GTiff --NoDataValue=0 '
+ '--creation-option TILED=YES --creation-option COMPRESS=LZW --creation-option NUM_THREADS=ALL_CPUS'
+ )
+ subprocess.run(cmd.split(' '), check=True)
+
+ # Performs the inverse of the operation performed by MintPy:
+ # https://github.com/insarlab/MintPy/blob/df96e0b73f13cc7e2b6bfa57d380963f140e3159/src/mintpy/objects/stackDict.py#L739-L745
+ # First add ninety degrees to the azimuth angle to go from angle-from-east to angle-from-north,
+ # then convert to radians
+ azimuth_angle = ISCE2Dataset('los.rdr.geo', 'lv_phi', 2)
+ cmd = (
+ 'gdal_calc.py '
+ f'--outfile {product_name}/{product_name}_{azimuth_angle.suffix}.tif '
+ f'-A merged/{azimuth_angle.name} --A_band={azimuth_angle.band} '
+ '--calc (90+A)*pi/180 --type Float32 --format GTiff --NoDataValue=0 '
+ '--creation-option TILED=YES --creation-option COMPRESS=LZW --creation-option NUM_THREADS=ALL_CPUS'
+ )
+ subprocess.run(cmd.split(' '), check=True)
+
+ ds = gdal.Open('merged/filt_topophase.unw.geo')
+ geotransform = ds.GetGeoTransform()
+ del ds
+
+ epsg = utm_from_lon_lat(geotransform[0], geotransform[3])
+ files = [str(path) for path in Path(product_name).glob('*.tif') if not path.name.endswith('rdr.tif')]
+ for file in files:
+ gdal.Warp(
+ file,
+ file,
+ dstSRS=f'epsg:{epsg}',
+ creationOptions=['TILED=YES', 'COMPRESS=LZW', 'NUM_THREADS=ALL_CPUS'],
+ xRes=pixel_size,
+ yRes=pixel_size,
+ targetAlignedPixels=True,
+ )
+
+
+def convert_raster_from_isce2_gdal(input_image, ref_image, output_image):
+ """Convert the water mask in WGS84 to be the same projection and extent of the output product.
+
+ Args:
+ input_image: name of the input file in WGS84 (e.g., the water mask)
+ ref_image: name of the reference geotiff file whose projection and extent are matched
+ output_image: name of the output file
+ """
+
+ ref_ds = gdal.Open(ref_image)
+
+ gt = ref_ds.GetGeoTransform()
+
+ pixel_size = gt[1]
+
+ minx = gt[0]
+ maxx = gt[0] + gt[1] * ref_ds.RasterXSize
+ maxy = gt[3]
+ miny = gt[3] + gt[5] * ref_ds.RasterYSize
+
+ crs = ref_ds.GetSpatialRef()
+ epsg = CRS.from_wkt(crs.ExportToWkt()).to_epsg()
+
+ del ref_ds
+
+ gdal.Warp(
+ output_image,
+ input_image,
+ dstSRS=f'epsg:{epsg}',
+ creationOptions=['TILED=YES', 'COMPRESS=LZW', 'NUM_THREADS=ALL_CPUS'],
+ outputBounds=[minx, miny, maxx, maxy],
+ xRes=pixel_size,
+ yRes=pixel_size,
+ targetAlignedPixels=True,
+ )
+
+
+def water_mask(unwrapped_phase: str, water_mask: str) -> None:
+ """Apply the water mask to the unwrapped phase
+
+ Args:
+ unwrapped_phase: The unwrapped phase file
+ water_mask: The water mask file
+ """
+
+ convert_raster_from_isce2_gdal('water_mask.wgs84', unwrapped_phase, water_mask)
+ cmd = (
+ 'gdal_calc.py '
+ f'--outfile {unwrapped_phase} '
+ f'-A {unwrapped_phase} -B {water_mask} '
+ '--calc A*B '
+ '--overwrite '
+ '--NoDataValue 0 '
+ '--creation-option TILED=YES --creation-option COMPRESS=LZW --creation-option NUM_THREADS=ALL_CPUS'
+ )
+ subprocess.run(cmd.split(' '), check=True)
+
+
+def make_readme(
+ product_dir: Path,
+ product_name: str,
+ reference_scene: str,
+ secondary_scene: str,
+ range_looks: int,
+ azimuth_looks: int,
+ apply_water_mask: bool,
+) -> None:
+ wrapped_phase_path = product_dir / f'{product_name}_wrapped_phase.tif'
+ info = gdal.Info(str(wrapped_phase_path), format='json')
+ secondary_granule_datetime_str = secondary_scene.split('_')[3]
+ if 'T' not in secondary_granule_datetime_str:
+ secondary_granule_datetime_str = secondary_scene.split('_')[5]
+
+ payload = {
+ 'processing_date': datetime.now(timezone.utc),
+ 'plugin_name': hyp3_isce2.__name__,
+ 'plugin_version': hyp3_isce2.__version__,
+ 'processor_name': isce.__name__.upper(),
+ 'processor_version': isce.__version__,
+ 'projection': get_projection(info['coordinateSystem']['wkt']),
+ 'pixel_spacing': info['geoTransform'][1],
+ 'product_name': product_name,
+ 'reference_burst_name': reference_scene,
+ 'secondary_burst_name': secondary_scene,
+ 'range_looks': range_looks,
+ 'azimuth_looks': azimuth_looks,
+ 'secondary_granule_date': datetime.strptime(secondary_granule_datetime_str, '%Y%m%dT%H%M%S'),
+ 'dem_name': 'GLO-30',
+ 'dem_pixel_spacing': '30 m',
+ 'apply_water_mask': apply_water_mask,
+ }
+ content = hyp3_isce2.metadata.util.render_template('insar_burst/insar_burst_readme.md.txt.j2', payload)
+
+ output_file = product_dir / f'{product_name}_README.md.txt'
+ with open(output_file, 'w') as f:
+ f.write(content)
+
+
+def find_available_swaths(base_dir: Path | str) -> list[str]:
+ """Find the available swaths in the given directory
+
+ Args:
+ base_dir: Path to the directory containing the swaths
+
+ Returns:
+ List of available swaths
+ """
+ geom_dir = Path(base_dir) / 'geom_reference'
+ swaths = sorted([file.name for file in geom_dir.iterdir() if file.is_dir()])
+ return swaths
+
+
+def make_parameter_file(
+ out_path: Path,
+ reference_scene: str,
+ secondary_scene: str,
+ azimuth_looks: int,
+ range_looks: int,
+ apply_water_mask: bool,
+ multilook_position: Optional[BurstPosition] = None,
+ dem_name: str = 'GLO_30',
+ dem_resolution: int = 30,
+) -> None:
+ """Create a parameter file for the output product
+
+ Args:
+ out_path: path to output the parameter file
+ reference_scene: Reference burst name
+ secondary_scene: Secondary burst name
+ azimuth_looks: Number of azimuth looks
+ range_looks: Number of range looks
+ multilook_position: Burst position for multilooked radar geometry products
+ dem_name: Name of the DEM that is used
+ dem_resolution: Resolution of the DEM
+
+ Returns:
+ None
+ """
+ SPEED_OF_LIGHT = 299792458.0
+ SPACECRAFT_HEIGHT = 693000.0
+ EARTH_RADIUS = 6337286.638938101
+
+ parser = etree.XMLParser(encoding='utf-8', recover=True)
+ if 'BURST' in reference_scene:
+ ref_tag = reference_scene[-10:-6]
+ sec_tag = secondary_scene[-10:-6]
+ else:
+ ref_tag = reference_scene[-4::]
+ sec_tag = secondary_scene[-4::]
+ reference_safe = [file for file in os.listdir('.') if file.endswith(f'{ref_tag}.SAFE')][0]
+ secondary_safe = [file for file in os.listdir('.') if file.endswith(f'{sec_tag}.SAFE')][0]
+
+ ref_annotation_path = f'{reference_safe}/annotation/'
+ ref_annotation = [file for file in os.listdir(ref_annotation_path) if os.path.isfile(ref_annotation_path + file)][0]
+
+ ref_manifest_xml = etree.parse(f'{reference_safe}/manifest.safe', parser)
+ sec_manifest_xml = etree.parse(f'{secondary_safe}/manifest.safe', parser)
+ ref_annotation_xml = etree.parse(f'{ref_annotation_path}{ref_annotation}', parser)
+ topsProc_xml = etree.parse('topsProc.xml', parser)
+ topsApp_xml = etree.parse('topsApp.xml', parser)
+
+ safe = '{http://www.esa.int/safe/sentinel-1.0}'
+ s1 = '{http://www.esa.int/safe/sentinel-1.0/sentinel-1}'
+ metadata_path = './/metadataObject[@ID="measurementOrbitReference"]//xmlData//'
+ orbit_number_query = metadata_path + safe + 'orbitNumber'
+ orbit_direction_query = metadata_path + safe + 'extension//' + s1 + 'pass'
+
+ ref_orbit_number = ref_manifest_xml.find(orbit_number_query).text
+ ref_orbit_direction = ref_manifest_xml.find(orbit_direction_query).text
+ sec_orbit_number = sec_manifest_xml.find(orbit_number_query).text
+ sec_orbit_direction = sec_manifest_xml.find(orbit_direction_query).text
+ ref_heading = float(ref_annotation_xml.find('.//platformHeading').text)
+ ref_time = ref_annotation_xml.find('.//productFirstLineUtcTime').text
+ slant_range_time = float(ref_annotation_xml.find('.//slantRangeTime').text)
+ range_sampling_rate = float(ref_annotation_xml.find('.//rangeSamplingRate').text)
+ number_samples = int(ref_annotation_xml.find('.//swathTiming/samplesPerBurst').text)
+ min_swath = find_available_swaths(Path.cwd())[0]
+ baseline_perp = topsProc_xml.find(f'.//IW-{int(min_swath[2])}_Bperp_at_midrange_for_first_common_burst').text
+ unwrapper_type = topsApp_xml.find('.//property[@name="unwrapper name"]').text
+ phase_filter_strength = topsApp_xml.find('.//property[@name="filter strength"]').text
+
+ slant_range_near = float(slant_range_time) * SPEED_OF_LIGHT / 2
+ range_pixel_spacing = SPEED_OF_LIGHT / (2 * range_sampling_rate)
+ slant_range_far = slant_range_near + (number_samples - 1) * range_pixel_spacing
+ slant_range_center = (slant_range_near + slant_range_far) / 2
+
+ s = ref_time.split('T')[1].split(':')
+ utc_time = ((int(s[0]) * 60 + int(s[1])) * 60) + float(s[2])
+
+ parameter_file = ParameterFile(
+ reference_granule=reference_scene,
+ secondary_granule=secondary_scene,
+ reference_orbit_direction=ref_orbit_direction,
+ reference_orbit_number=ref_orbit_number,
+ secondary_orbit_direction=sec_orbit_direction,
+ secondary_orbit_number=sec_orbit_number,
+ baseline=float(baseline_perp),
+ utc_time=utc_time,
+ heading=ref_heading,
+ spacecraft_height=SPACECRAFT_HEIGHT,
+ earth_radius_at_nadir=EARTH_RADIUS,
+ slant_range_near=slant_range_near,
+ slant_range_center=slant_range_center,
+ slant_range_far=slant_range_far,
+ range_looks=int(range_looks),
+ azimuth_looks=int(azimuth_looks),
+ insar_phase_filter=True,
+ phase_filter_parameter=float(phase_filter_strength),
+ range_bandpass_filter=False,
+ azimuth_bandpass_filter=False,
+ dem_source=dem_name,
+ dem_resolution=dem_resolution,
+ unwrapping_type=unwrapper_type,
+ speckle_filter=True,
+ water_mask=apply_water_mask,
+ )
+ if multilook_position:
+ parameter_file.radar_n_lines = multilook_position.n_lines
+ parameter_file.radar_n_samples = multilook_position.n_samples
+ parameter_file.radar_first_valid_line = multilook_position.first_valid_line
+ parameter_file.radar_n_valid_lines = multilook_position.n_valid_lines
+ parameter_file.radar_first_valid_sample = multilook_position.first_valid_sample
+ parameter_file.radar_n_valid_samples = multilook_position.n_valid_samples
+ parameter_file.multilook_azimuth_time_interval = multilook_position.azimuth_time_interval
+ parameter_file.multilook_range_pixel_size = multilook_position.range_pixel_size
+ parameter_file.radar_sensing_stop = multilook_position.sensing_stop
+
+ parameter_file.write(out_path)
+
+
+def upload_product_to_s3(product_dir, output_zip, bucket, bucket_prefix):
+ for browse in product_dir.glob('*.png'):
+ create_thumbnail(browse, output_dir=product_dir)
+
+ upload_file_to_s3(Path(output_zip), bucket, bucket_prefix)
+
+ for product_file in product_dir.iterdir():
+ upload_file_to_s3(product_file, bucket, bucket_prefix)
diff --git a/src/hyp3_isce2/slc.py b/src/hyp3_isce2/slc.py
index 422e4846..02be9468 100644
--- a/src/hyp3_isce2/slc.py
+++ b/src/hyp3_isce2/slc.py
@@ -4,6 +4,7 @@
from subprocess import PIPE, run
from zipfile import ZipFile
+import lxml.etree as ET
from hyp3lib.fetch import download_file
from hyp3lib.scene import get_download_url
from shapely import geometry
@@ -19,6 +20,10 @@ def get_granule(granule: str) -> Path:
Returns:
The path to the unzipped granule
"""
+ if Path(f'{granule}.SAFE').exists():
+ print('SAFE file already exists, skipping download.')
+ return Path.cwd() / f'{granule}.SAFE'
+
download_url = get_download_url(granule)
zip_file = download_file(download_url, chunk_size=10485760)
safe_dir = unzip_granule(zip_file, remove=True)
@@ -28,7 +33,7 @@ def get_granule(granule: str) -> Path:
def unzip_granule(zip_file: Path, remove: bool = False) -> Path:
with ZipFile(zip_file) as z:
z.extractall()
- safe_dir = next(item.filename for item in z.infolist() if item.is_dir() and item.filename.endswith('.SAFE/'))
+ safe_dir = f'{Path(zip_file).stem}.SAFE/'
if remove:
os.remove(zip_file)
return safe_dir.strip('/')
@@ -41,6 +46,16 @@ def get_geometry_from_kml(kml_file: str) -> Polygon:
return geometry.shape(geojson)
+def get_geometry_from_manifest(manifest_path: Path) -> Polygon:
+ manifest = ET.parse(manifest_path).getroot()
+ frame_element = [x for x in manifest.findall('.//metadataObject') if x.get('ID') == 'measurementFrameSet'][0]
+ frame_string = frame_element.find('.//{http://www.opengis.net/gml}coordinates').text
+ coord_strings = [pair.split(',') for pair in frame_string.split(' ')]
+ coords = [(float(lon), float(lat)) for lat, lon in coord_strings]
+ footprint = Polygon(coords)
+ return footprint
+
+
def get_dem_bounds(reference_granule: Path, secondary_granule: Path) -> tuple:
"""Get the bounds of the DEM to use in processing from SAFE KML files
@@ -53,7 +68,7 @@ def get_dem_bounds(reference_granule: Path, secondary_granule: Path) -> tuple:
"""
bboxs = []
for granule in (reference_granule, secondary_granule):
- footprint = get_geometry_from_kml(str(granule / 'preview' / 'map-overlay.kml'))
+ footprint = get_geometry_from_manifest(granule / 'manifest.safe')
bbox = geometry.box(*footprint.bounds)
bboxs.append(bbox)
diff --git a/src/hyp3_isce2/topsapp.py b/src/hyp3_isce2/topsapp.py
index c233c0b3..7e7eecc5 100644
--- a/src/hyp3_isce2/topsapp.py
+++ b/src/hyp3_isce2/topsapp.py
@@ -41,7 +41,7 @@
]
-class TopsappBurstConfig:
+class TopsappConfig:
"""Configuration for a topsApp.py run"""
def __init__(
@@ -135,8 +135,8 @@ def swap_burst_vrts():
del base
-def run_topsapp_burst(dostep: str = '', start: str = '', end: str = '', config_xml: Path = Path('topsApp.xml')):
- """Run topsApp.py for a burst pair with the desired steps and config file
+def run_topsapp(dostep: str = '', start: str = '', end: str = '', config_xml: Path = Path('topsApp.xml')):
+ """Run topsApp.py for a granule pair with the desired steps and config file
Args:
dostep: The step to run
diff --git a/src/hyp3_isce2/utils.py b/src/hyp3_isce2/utils.py
index b06d2731..df3ab55d 100644
--- a/src/hyp3_isce2/utils.py
+++ b/src/hyp3_isce2/utils.py
@@ -9,7 +9,7 @@
import numpy as np
from isceobj.Util.ImageUtil.ImageLib import loadImage
from iscesys.Component.ProductManager import ProductManager
-from osgeo import gdal
+from osgeo import gdal, osr
gdal.UseExceptions()
@@ -179,12 +179,6 @@ def make_browse_image(input_tif: str, output_png: str) -> None:
)
-def oldest_granule_first(g1, g2):
- if g1[14:29] <= g2[14:29]:
- return g1, g2
- return g2, g1
-
-
def load_isce2_image(in_path) -> tuple[isceobj.Image, np.ndarray]:
"""Read an ISCE2 image file and return the image object and array.
@@ -203,7 +197,7 @@ def load_isce2_image(in_path) -> tuple[isceobj.Image, np.ndarray]:
shape = (image_obj.bands, image_obj.length, image_obj.width)
new_array = np.zeros(shape, dtype=image_obj.toNumpyDataType())
for i in range(image_obj.bands):
- new_array[i, :, :] = array[i:: image_obj.bands]
+ new_array[i, :, :] = array[i::image_obj.bands]
array = new_array.copy()
else:
raise NotImplementedError('Non-BIL reading is not implemented')
@@ -368,7 +362,7 @@ def write_isce2_image_from_obj(image_obj, array):
shape = (image_obj.length * image_obj.bands, image_obj.width)
new_array = np.zeros(shape, dtype=image_obj.toNumpyDataType())
for i in range(image_obj.bands):
- new_array[i:: image_obj.bands] = array[i, :, :]
+ new_array[i::image_obj.bands] = array[i, :, :]
array = new_array.copy()
else:
raise NotImplementedError('Non-BIL writing is not implemented')
@@ -445,3 +439,9 @@ def read_product_metadata(meta_file_path: str) -> dict:
value = ':'.join(values)
hyp3_meta[key] = value
return hyp3_meta
+
+
+def get_projection(srs_wkt) -> str:
+ srs = osr.SpatialReference()
+ srs.ImportFromWkt(srs_wkt)
+ return srs.GetAttrValue('projcs')
diff --git a/tests/test_burst.py b/tests/test_burst.py
index 645a6759..2fd04ff6 100644
--- a/tests/test_burst.py
+++ b/tests/test_burst.py
@@ -2,7 +2,6 @@
from collections import namedtuple
from datetime import datetime
from pathlib import Path
-from re import match
from unittest.mock import patch
import asf_search
@@ -107,20 +106,6 @@ def test_get_region_of_interest(tmp_path, orbit):
assert not roi.intersects(ref_bbox_post)
-def test_get_product_name():
- reference_name = 'S1_136231_IW2_20200604T022312_VV_7C85-BURST'
- secondary_name = 'S1_136231_IW2_20200616T022313_VV_5D11-BURST'
-
- name_20m = burst.get_product_name(reference_name, secondary_name, pixel_spacing=20.0)
- name_80m = burst.get_product_name(reference_name, secondary_name, pixel_spacing=80)
-
- assert match('[A-F0-9]{4}', name_20m[-4:]) is not None
- assert match('[A-F0-9]{4}', name_80m[-4:]) is not None
-
- assert name_20m.startswith('S1_136231_IW2_20200604_20200616_VV_INT20')
- assert name_80m.startswith('S1_136231_IW2_20200604_20200616_VV_INT80')
-
-
def mock_asf_search_results(
slc_name: str, subswath: str, polarization: str, burst_index: int
) -> asf_search.ASFSearchResults:
@@ -188,18 +173,44 @@ def test_get_burst_params_multiple_results():
def test_validate_bursts():
- burst.validate_bursts('S1_030349_IW1_20230808T171601_VV_4A37-BURST', 'S1_030349_IW1_20230820T171602_VV_5AC3-BURST')
- with pytest.raises(ValueError, match=r'.*polarizations are not the same.*'):
+ burst.validate_bursts('S1_000000_IW1_20200101T000000_VV_0000-BURST', 'S1_000000_IW1_20200201T000000_VV_0000-BURST')
+ burst.validate_bursts(
+ ['S1_000000_IW1_20200101T000000_VV_0000-BURST', 'S1_000001_IW1_20200101T000001_VV_0000-BURST'],
+ ['S1_000000_IW1_20200201T000000_VV_0000-BURST', 'S1_000001_IW1_20200201T000001_VV_0000-BURST'],
+ )
+
+ with pytest.raises(ValueError, match=r'Must include at least 1.*'):
+ burst.validate_bursts(['a'], [])
+
+ with pytest.raises(ValueError, match=r'Must have the same number.*'):
+ burst.validate_bursts(['a', 'b'], ['c'])
+
+ with pytest.raises(ValueError, match=r'.*burst ID sets do not match.*'):
burst.validate_bursts(
- 'S1_215032_IW2_20230802T144608_VV_7EE2-BURST', 'S1_215032_IW2_20230721T144607_HH_B3FA-BURST'
+ ['S1_000000_IW1_20200101T000000_VV_0000-BURST'], ['S1_000000_IW2_20200201T000000_VV_0000-BURST']
)
- with pytest.raises(ValueError, match=r'.*burst IDs are not the same.*'):
+
+ with pytest.raises(ValueError, match=r'.*must have a single polarization.*'):
+ burst.validate_bursts(
+ ['S1_000000_IW1_20200101T000000_VV_0000-BURST', 'S1_000000_IW1_20200101T000000_VH_0000-BURST'],
+ ['S1_000000_IW1_20200201T000000_VV_0000-BURST', 'S1_000000_IW1_20200201T000000_VH_0000-BURST'],
+ )
+
+ with pytest.raises(ValueError, match=r'.*polarization is not currently supported.*'):
burst.validate_bursts(
- 'S1_030349_IW1_20230808T171601_VV_4A37-BURST', 'S1_030348_IW1_20230820T171602_VV_5AC3-BURST'
+ ['S1_000000_IW1_20200101T000000_VH_0000-BURST', 'S1_000000_IW1_20200101T000000_VH_0000-BURST'],
+ ['S1_000000_IW1_20200201T000000_VH_0000-BURST', 'S1_000000_IW1_20200201T000000_VH_0000-BURST'],
)
- with pytest.raises(ValueError, match=r'.*only VV and HH.*'):
+
+ with pytest.raises(ValueError, match=r'.*must be from one date.*'):
+ burst.validate_bursts(
+ ['S1_000000_IW1_20200101T000000_VV_0000-BURST', 'S1_000001_IW1_20200101T000000_VV_0000-BURST'],
+ ['S1_000000_IW1_20200201T000000_VV_0000-BURST', 'S1_000001_IW1_20200202T000000_VV_0000-BURST'],
+ )
+
+ with pytest.raises(ValueError, match=r'Reference granules must be older.*'):
burst.validate_bursts(
- 'S1_030349_IW1_20230808T171601_VH_4A37-BURST', 'S1_030349_IW1_20230820T171602_VH_5AC3-BURST'
+ 'S1_000000_IW1_20200201T000000_VV_0000-BURST', 'S1_000000_IW1_20200101T000000_VV_0000-BURST'
)
diff --git a/tests/test_insar_tops_burst.py b/tests/test_insar_tops_burst.py
deleted file mode 100644
index 28690f6a..00000000
--- a/tests/test_insar_tops_burst.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from hyp3_isce2 import insar_tops_burst
-
-
-def test_get_pixel_size():
- assert insar_tops_burst.get_pixel_size('20x4') == 80.0
- assert insar_tops_burst.get_pixel_size('10x2') == 40.0
- assert insar_tops_burst.get_pixel_size('5x1') == 20.0
diff --git a/tests/test_packaging.py b/tests/test_packaging.py
new file mode 100644
index 00000000..ac9fadee
--- /dev/null
+++ b/tests/test_packaging.py
@@ -0,0 +1,23 @@
+from re import match
+
+from hyp3_isce2 import packaging
+
+
+def test_get_product_name():
+ reference_name = 'S1_136231_IW2_20200604T022312_VV_7C85-BURST'
+ secondary_name = 'S1_136231_IW2_20200616T022313_VV_5D11-BURST'
+
+ name_20m = packaging.get_product_name(reference_name, secondary_name, pixel_spacing=20.0, slc=False)
+ name_80m = packaging.get_product_name(reference_name, secondary_name, pixel_spacing=80, slc=False)
+
+ assert match('[A-F0-9]{4}', name_20m[-4:]) is not None
+ assert match('[A-F0-9]{4}', name_80m[-4:]) is not None
+
+ assert name_20m.startswith('S1_136231_IW2_20200604_20200616_VV_INT20')
+ assert name_80m.startswith('S1_136231_IW2_20200604_20200616_VV_INT80')
+
+
+def test_get_pixel_size():
+ assert packaging.get_pixel_size('20x4') == 80.0
+ assert packaging.get_pixel_size('10x2') == 40.0
+ assert packaging.get_pixel_size('5x1') == 20.0
diff --git a/tests/test_slc.py b/tests/test_slc.py
index 028de11a..6db1f343 100644
--- a/tests/test_slc.py
+++ b/tests/test_slc.py
@@ -51,7 +51,7 @@ def test_get_geometry_from_kml(test_data_dir):
def test_dem_bounds(mocker):
- mocker.patch('hyp3_isce2.slc.get_geometry_from_kml')
- slc.get_geometry_from_kml.side_effect = [box(-1, -1, 1, 1), box(0, 0, 2, 2)]
+ mocker.patch('hyp3_isce2.slc.get_geometry_from_manifest')
+ slc.get_geometry_from_manifest.side_effect = [box(-1, -1, 1, 1), box(0, 0, 2, 2)]
bounds = slc.get_dem_bounds(Path('ref'), Path('sec'))
assert bounds == (0, 0, 1, 1)
diff --git a/tests/test_topsapp.py b/tests/test_topsapp.py
index 272c0af8..fd1689df 100644
--- a/tests/test_topsapp.py
+++ b/tests/test_topsapp.py
@@ -1,10 +1,10 @@
import pytest
-from hyp3_isce2.topsapp import TopsappBurstConfig, run_topsapp_burst, swap_burst_vrts
+from hyp3_isce2.topsapp import TopsappConfig, run_topsapp, swap_burst_vrts
def test_topsapp_burst_config(tmp_path):
- config = TopsappBurstConfig(
+ config = TopsappConfig(
reference_safe='S1A_IW_SLC__1SDV_20200604T022251_20200604T022318_032861_03CE65_7C85.SAFE',
secondary_safe='S1A_IW_SLC__1SDV_20200616T022252_20200616T022319_033036_03D3A3_5D11.SAFE',
polarization='VV',
@@ -49,9 +49,9 @@ def test_swap_burst_vrts(tmp_path, monkeypatch):
def test_run_topsapp_burst(tmp_path, monkeypatch):
with pytest.raises(IOError):
- run_topsapp_burst('topsApp.xml')
+ run_topsapp('topsApp.xml')
- config = TopsappBurstConfig(
+ config = TopsappConfig(
reference_safe='',
secondary_safe='',
polarization='',
@@ -67,10 +67,10 @@ def test_run_topsapp_burst(tmp_path, monkeypatch):
template_path = config.write_template(tmp_path / 'topsApp.xml')
with pytest.raises(ValueError, match=r'.*not a valid step.*'):
- run_topsapp_burst('notastep', config_xml=template_path)
+ run_topsapp('notastep', config_xml=template_path)
with pytest.raises(ValueError, match=r'^If dostep is specified, start and stop cannot be used$'):
- run_topsapp_burst('preprocess', 'startup', config_xml=template_path)
+ run_topsapp('preprocess', 'startup', config_xml=template_path)
monkeypatch.chdir(tmp_path)
- run_topsapp_burst('preprocess', config_xml=template_path)
+ run_topsapp('preprocess', config_xml=template_path)
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 09e6b8de..2322ba2b 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -4,13 +4,12 @@
from pathlib import Path
from unittest.mock import patch
+import isceobj # noqa
import numpy as np
from osgeo import gdal
import hyp3_isce2.utils as utils
-import isceobj # noqa
-
gdal.UseExceptions()
@@ -54,13 +53,6 @@ def test_gdal_config_manager():
assert gdal.GetConfigOption('OPTION4') == 'VALUE4'
-def test_oldest_granule_first():
- oldest = 'S1_249434_IW1_20230511T170732_VV_07DE-BURST'
- latest = 'S1_249434_IW1_20230523T170733_VV_8850-BURST'
- assert utils.oldest_granule_first(oldest, latest) == (oldest, latest)
- assert utils.oldest_granule_first(latest, oldest) == (oldest, latest)
-
-
def test_make_browse_image():
input_tif = 'tests/data/test_geotiff.tif'
output_png = 'tests/data/test_browse_image2.png'