Commit ebe4e48

Merge pull request #219 from ASFHyP3/develop

Release v0.11.0 -- S1 Correction workflow

jhkennedy authored Jun 10, 2023
2 parents ac7d5c7 + 12baaec commit ebe4e48
Showing 9 changed files with 291 additions and 59 deletions.
16 changes: 16 additions & 0 deletions CHANGELOG.md
@@ -7,6 +7,22 @@ and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
 and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
 
+## [0.11.0]
+
+### Added
+* `hyp3_autorift`'s main entrypoint now accepts `++process` arguments to support multiple workflows (see the sketch after this file's diff)
+  * `++process hyp3_autorift` (default) will run the same autoRIFT pair processing workflow as previous releases
+  * `++process s1_correction` will run a Geogrid-only workflow to create the GeoTIFFs necessary for correcting the
+    scale-projection issue in polar-stereographic products generated from Sentinel-1 pairs that were created using HyP3
+    autoRIFT versions < 0.9.0, which was released November 2, 2022
+
+### Changed
+* Patch [nasa-jpl/autorift#78](hyp3_autorift/vend/CHANGES-UPSTREAM-78.diff) was applied from upstream to support the
+  Sentinel-1 correction workflow
+
+### Removed
+* The unused `autorift_proc_pair` console script entrypoint was removed
+
 ## [0.10.5]
 
 ### Fixed
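A minimal, self-contained sketch of the `++process` argument handling described in the changelog entry above (illustrative code, not part of this commit; the bucket and granule names are hypothetical). `argparse` is configured with `prefix_chars='+'`, so only `++`-prefixed tokens are options for the top-level parser, while `--`-style arguments fall through `parse_known_args` to the selected workflow's own parser:

```python
import argparse

# Illustrative sketch: only '+'-prefixed tokens are options for this parser.
parser = argparse.ArgumentParser(prefix_chars='+')
parser.add_argument('++process', choices=['hyp3_autorift', 's1_correction'], default='hyp3_autorift')

# '--bucket' and the granule are not recognized here (prefix_chars excludes '-'),
# so they are returned as unknowns for the selected workflow's parser.
args, unknowns = parser.parse_known_args(['++process', 's1_correction', '--bucket', 'my-bucket', 'S1A_EXAMPLE_GRANULE'])

assert args.process == 's1_correction'
assert unknowns == ['--bucket', 'my-bucket', 'S1A_EXAMPLE_GRANULE']
```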
66 changes: 24 additions & 42 deletions hyp3_autorift/__main__.py
@@ -2,63 +2,45 @@
 AutoRIFT processing for HyP3
 """
 
-
+import argparse
 import os
-from argparse import ArgumentParser
+import sys
+import warnings
+from importlib.metadata import entry_points
+from pathlib import Path
 
-from hyp3lib.aws import upload_file_to_s3
 from hyp3lib.fetch import write_credentials_to_netrc_file
-from hyp3lib.image import create_thumbnail
-
-from hyp3_autorift.process import DEFAULT_PARAMETER_FILE, get_datetime, process
-
-
-def check_earthdata_credentials(username, password):
-    if username is None:
-        username = os.getenv('EARTHDATA_USERNAME')
-
-    if password is None:
-        password = os.getenv('EARTHDATA_PASSWORD')
-
-    return username, password
 
 
 def main():
-    parser = ArgumentParser()
-    parser.add_argument('--username', help='NASA Earthdata Login username for fetching Sentinel-1 scenes')
-    parser.add_argument('--password', help='NASA Earthdata Login password for fetching Sentinel-1 scenes')
-    parser.add_argument('--bucket', help='AWS bucket to upload product files to')
-    parser.add_argument('--bucket-prefix', default='', help='AWS prefix (location in bucket) to add to product files')
-    parser.add_argument('--parameter-file', default=DEFAULT_PARAMETER_FILE,
-                        help='Shapefile for determining the correct search parameters by '
-                             'geographic location. '
-                             'Path to shapefile must be understood by GDAL')
-    parser.add_argument('--naming-scheme', default='ITS_LIVE_OD', choices=['ITS_LIVE_OD', 'ITS_LIVE_PROD', 'ASF'],
-                        help='Naming scheme to use for product files')
-    parser.add_argument('--omp-num-threads', type=int, help='The number of OpenMP threads to use for parallel regions')
-    parser.add_argument('granules', type=str.split, nargs='+',
-                        help='Granule pair to process')
-    args = parser.parse_args()
-    username, password = check_earthdata_credentials(args.username, args.password)
+    parser = argparse.ArgumentParser(prefix_chars='+', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument(
+        '++process', choices=['hyp3_autorift', 's1_correction'], default='hyp3_autorift',
+        help='Select the console_script entrypoint to use'  # console_script entrypoints are specified in `setup.py`
+    )
+    parser.add_argument('++omp-num-threads', type=int, help='The number of OpenMP threads to use for parallel regions')
 
-    args.granules = [item for sublist in args.granules for item in sublist]
-    if len(args.granules) != 2:
-        parser.error('Must provide exactly two granules')
+    args, unknowns = parser.parse_known_args()
 
     if args.omp_num_threads:
         os.environ['OMP_NUM_THREADS'] = str(args.omp_num_threads)
 
+    username = os.getenv('EARTHDATA_USERNAME')
+    password = os.getenv('EARTHDATA_PASSWORD')
     if username and password:
-        write_credentials_to_netrc_file(username, password)
+        write_credentials_to_netrc_file(username, password, append=False)
 
-    g1, g2 = sorted(args.granules, key=get_datetime)
+    if not (Path.home() / '.netrc').exists():
+        warnings.warn('Earthdata credentials must be present as environment variables, or in your netrc.', UserWarning)
 
-    product_file, browse_file = process(g1, g2, parameter_file=args.parameter_file, naming_scheme=args.naming_scheme)
+    eps = entry_points()['console_scripts']
+    (process_entry_point,) = {process for process in eps if process.name == args.process}
 
-    if args.bucket:
-        upload_file_to_s3(product_file, args.bucket, args.bucket_prefix)
-        upload_file_to_s3(browse_file, args.bucket, args.bucket_prefix)
-        thumbnail_file = create_thumbnail(browse_file)
-        upload_file_to_s3(thumbnail_file, args.bucket, args.bucket_prefix)
+    sys.argv = [args.process, *unknowns]
+    sys.exit(
+        process_entry_point.load()()
+    )
 
 
 if __name__ == '__main__':
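The dispatch idiom in the new `main()` above (look up an installed console_script by name, rewrite `sys.argv`, then call it) can be read in isolation. A minimal sketch under two assumptions: the pre-3.10 `importlib.metadata` mapping API (newer Pythons select with `entry_points(group='console_scripts')`), and a hypothetical installed script named `my-tool`:

```python
import sys
from importlib.metadata import entry_points


def dispatch(name, argv):
    """Invoke an installed console_script by name with a rewritten argv."""
    eps = entry_points()['console_scripts']        # pre-3.10 mapping-style access
    (ep,) = {ep for ep in eps if ep.name == name}  # unpacking fails unless exactly one match
    sys.argv = [name, *argv]                       # the target's main() re-parses sys.argv itself
    return ep.load()()                             # import and call the entrypoint function


# Hypothetical usage: sys.exit(dispatch('my-tool', ['--help']))
```

Rewriting `sys.argv` keeps each workflow's existing argparse interface unchanged, which is why the pass-through `unknowns` collected by the top-level parser need no translation.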
33 changes: 23 additions & 10 deletions hyp3_autorift/process.py
@@ -17,8 +17,10 @@
 import botocore.exceptions
 import numpy as np
 import requests
+from hyp3lib.aws import upload_file_to_s3
 from hyp3lib.fetch import download_file
 from hyp3lib.get_orb import downloadSentinelOrbitFile
+from hyp3lib.image import create_thumbnail
 from hyp3lib.scene import get_download_url
 from netCDF4 import Dataset
 from osgeo import gdal
@@ -493,19 +495,30 @@ def process(reference: str, secondary: str, parameter_file: str = DEFAULT_PARAMETER_FILE,
 
 
 def main():
-    """Main entrypoint"""
     parser = argparse.ArgumentParser(
         prog=os.path.basename(__file__),
         description=__doc__,
         formatter_class=argparse.ArgumentDefaultsHelpFormatter
     )
-    parser.add_argument('reference', type=os.path.abspath,
-                        help='Reference Sentinel-1, Sentinel-2, or Landsat-8 Collection 2 scene')
-    parser.add_argument('secondary', type=os.path.abspath,
-                        help='Secondary Sentinel-1, Sentinel-2, or Landsat-8 Collection 2 scene')
+    parser.add_argument('--bucket', help='AWS bucket to upload product files to')
+    parser.add_argument('--bucket-prefix', default='', help='AWS prefix (location in bucket) to add to product files')
+    parser.add_argument('--parameter-file', default=DEFAULT_PARAMETER_FILE,
+                        help='Shapefile for determining the correct search parameters by geographic location. '
+                             'Path to shapefile must be understood by GDAL')
+    parser.add_argument('--naming-scheme', default='ITS_LIVE_OD', choices=['ITS_LIVE_OD', 'ITS_LIVE_PROD', 'ASF'],
+                        help='Naming scheme to use for product files')
+    parser.add_argument('granules', type=str.split, nargs='+',
+                        help='Granule pair to process')
     args = parser.parse_args()
 
-    process(**args.__dict__)
+    args.granules = [item for sublist in args.granules for item in sublist]
+    if len(args.granules) != 2:
+        parser.error('Must provide exactly two granules')
+
+    g1, g2 = sorted(args.granules, key=get_datetime)
 
-if __name__ == "__main__":
-    main()
+    product_file, browse_file = process(g1, g2, parameter_file=args.parameter_file, naming_scheme=args.naming_scheme)
+
+    if args.bucket:
+        upload_file_to_s3(product_file, args.bucket, args.bucket_prefix)
+        upload_file_to_s3(browse_file, args.bucket, args.bucket_prefix)
+        thumbnail_file = create_thumbnail(browse_file)
+        upload_file_to_s3(thumbnail_file, args.bucket, args.bucket_prefix)
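The `type=str.split, nargs='+'` pattern plus the flatten step in the new `main()` lets callers pass the pair either as one quoted, space-separated argument or as two separate arguments. A minimal standalone sketch (granule names are placeholders):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('granules', type=str.split, nargs='+', help='Granule pair to process')

# Both calling styles flatten to the same two-granule list.
for argv in (['GRANULE_A GRANULE_B'],       # one quoted, space-separated argument
             ['GRANULE_A', 'GRANULE_B']):   # two separate arguments
    args = parser.parse_args(argv)
    granules = [item for sublist in args.granules for item in sublist]
    assert granules == ['GRANULE_A', 'GRANULE_B']
```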
67 changes: 67 additions & 0 deletions hyp3_autorift/s1_correction.py
@@ -0,0 +1,67 @@
import argparse
import copy
import logging
from datetime import timedelta
from pathlib import Path

from hyp3lib.aws import upload_file_to_s3
from hyp3lib.fetch import download_file
from hyp3lib.get_orb import downloadSentinelOrbitFile
from hyp3lib.scene import get_download_url

from hyp3_autorift import geometry, io
from hyp3_autorift.process import DEFAULT_PARAMETER_FILE, get_s1_primary_polarization
from hyp3_autorift.vend.testGeogrid_ISCE import loadParsedata, runGeogrid

log = logging.getLogger(__name__)


def generate_correction_data(scene: str, buffer: int = 0, parameter_file: str = DEFAULT_PARAMETER_FILE):
    scene_path = Path(f'{scene}.zip')
    if not scene_path.exists():
        scene_url = get_download_url(scene)
        scene_path = download_file(scene_url, chunk_size=5242880)

    orbits = Path('Orbits').resolve()
    orbits.mkdir(parents=True, exist_ok=True)
    state_vec, orbit_provider = downloadSentinelOrbitFile(scene, directory=str(orbits))
    log.info(f'Downloaded orbit file {state_vec} from {orbit_provider}')

    polarization = get_s1_primary_polarization(scene)
    lat_limits, lon_limits = geometry.bounding_box(f'{scene}.zip', polarization=polarization, orbits=orbits)

    scene_poly = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits)
    parameter_info = io.find_jpl_parameter_info(scene_poly, parameter_file)

    isce_dem = geometry.prep_isce_dem(parameter_info['geogrid']['dem'], lat_limits, lon_limits)
    io.format_tops_xml(scene, scene, polarization, isce_dem, orbits)

    reference_meta = loadParsedata(str(scene_path), orbit_dir=orbits, aux_dir=orbits, buffer=buffer)

    secondary_meta = copy.deepcopy(reference_meta)
    spoof_dt = timedelta(days=1)
    secondary_meta.sensingStart += spoof_dt
    secondary_meta.sensingStop += spoof_dt

    geogrid_info = runGeogrid(reference_meta, secondary_meta, epsg=parameter_info['epsg'], **parameter_info['geogrid'])

    return geogrid_info


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument('--bucket', help='AWS bucket to upload product files to')
    parser.add_argument('--bucket-prefix', default='', help='AWS prefix (location in bucket) to add to product files')
    parser.add_argument('--buffer', type=int, default=0, help='Number of pixels to buffer each edge of the input scene')
    parser.add_argument('--parameter-file', default=DEFAULT_PARAMETER_FILE,
                        help='Shapefile for determining the correct search parameters by geographic location. '
                             'Path to shapefile must be understood by GDAL')
    parser.add_argument('granule', help='Reference granule to process')
    args = parser.parse_args()

    _ = generate_correction_data(args.granule, buffer=args.buffer, parameter_file=args.parameter_file)

    if args.bucket:
        for geotiff in Path.cwd().glob('*.tif'):
            upload_file_to_s3(geotiff, args.bucket, args.bucket_prefix)
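Geogrid expects metadata for a reference and a secondary scene, but the correction workflow only has one scene, so `generate_correction_data` deep-copies the reference metadata and shifts the copy's sensing times by one day. A minimal sketch of just that trick, with a hypothetical `Meta` stand-in for the ISCE metadata object:

```python
import copy
from dataclasses import dataclass
from datetime import datetime, timedelta


@dataclass
class Meta:  # hypothetical stand-in for the ISCE scene metadata object
    sensingStart: datetime
    sensingStop: datetime


reference_meta = Meta(datetime(2023, 6, 1, 10, 0, 0), datetime(2023, 6, 1, 10, 0, 30))

# Spoof a "secondary" scene one day after the reference so Geogrid sees a pair.
secondary_meta = copy.deepcopy(reference_meta)
spoof_dt = timedelta(days=1)
secondary_meta.sensingStart += spoof_dt
secondary_meta.sensingStop += spoof_dt

assert secondary_meta.sensingStart - reference_meta.sensingStart == spoof_dt
assert reference_meta.sensingStart == datetime(2023, 6, 1, 10, 0, 0)  # deepcopy left the original untouched
```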
97 changes: 97 additions & 0 deletions hyp3_autorift/vend/CHANGES-UPSTREAM-78.diff
@@ -0,0 +1,97 @@
diff --git testGeogrid_ISCE.py testGeogrid_ISCE.py
--- testGeogrid_ISCE.py
+++ testGeogrid_ISCE.py
@@ -67,6 +67,10 @@ def cmdLineParse():
help='Input stable surface mask')
parser.add_argument('-fo', '--flag_optical', dest='optical_flag', type=bool, required=False, default=0,
help='flag for reading optical data (e.g. Landsat): use 1 for on and 0 (default) for off')
+ parser.add_argument('-b', '--buffer', dest='buffer', type=bool, required=False, default=0,
+ help='buffer to add to the starting/end range accounting for all passes from the same relative orbit')
+ parser.add_argument('-p', '--parse', dest='parse', action='store_true',
+ default=False, help='Parse the SAFE zip file to get radar image and orbit metadata; no need to run ISCE')

return parser.parse_args()

@@ -113,7 +117,7 @@ def getMergedOrbit(product):
return orb


-def loadMetadata(indir):
+def loadMetadata(indir,buffer=0):
'''
Input file.
'''
@@ -135,12 +139,57 @@ def loadMetadata(indir):
info.prf = 1.0 / frames[0].bursts[0].azimuthTimeInterval
info.rangePixelSize = frames[0].bursts[0].rangePixelSize
info.lookSide = -1
+
+ info.startingRange -= buffer * info.rangePixelSize
+ info.farRange += buffer * info.rangePixelSize
+
info.numberOfLines = int( np.round( (info.sensingStop - info.sensingStart).total_seconds() * info.prf)) + 1
- info.numberOfSamples = int( np.round( (info.farRange - info.startingRange)/info.rangePixelSize)) + 1
+ info.numberOfSamples = int( np.round( (info.farRange - info.startingRange)/info.rangePixelSize)) + 1 + 2 * buffer
info.orbit = getMergedOrbit(frames)

return info

+def loadParsedata(indir,buffer=0):
+ '''
+ Input file.
+ '''
+ import os
+ import numpy as np
+ import isce
+ from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1
+
+
+ frames = []
+ for swath in range(1,4):
+ rdr=Sentinel1()
+ rdr.configure()
+# rdr.safe=['./S1A_IW_SLC__1SDH_20180401T100057_20180401T100124_021272_024972_8CAF.zip']
+ rdr.safe=[indir]
+ rdr.output='reference'
+ rdr.orbitDir='/Users/yanglei/orbit/S1A/precise'
+ rdr.auxDir='/Users/yanglei/orbit/S1A/aux'
+ rdr.swathNumber=swath
+ rdr.polarization='hh'
+ rdr.parse()
+ frames.append(rdr.product)
+
+ info = Dummy()
+ info.sensingStart = min([x.sensingStart for x in frames])
+ info.sensingStop = max([x.sensingStop for x in frames])
+ info.startingRange = min([x.startingRange for x in frames])
+ info.farRange = max([x.farRange for x in frames])
+ info.prf = 1.0 / frames[0].bursts[0].azimuthTimeInterval
+ info.rangePixelSize = frames[0].bursts[0].rangePixelSize
+ info.lookSide = -1
+
+ info.startingRange -= buffer * info.rangePixelSize
+ info.farRange += buffer * info.rangePixelSize
+
+ info.numberOfLines = int( np.round( (info.sensingStop - info.sensingStart).total_seconds() * info.prf)) + 1
+ info.numberOfSamples = int( np.round( (info.farRange - info.startingRange)/info.rangePixelSize)) + 1 + 2 * buffer
+ info.orbit = getMergedOrbit(frames)
+
+ return info

def coregisterLoadMetadataOptical(indir_m, indir_s):
'''
@@ -383,8 +432,12 @@ def main():
metadata_m, metadata_s = coregisterLoadMetadataOptical(inps.indir_m, inps.indir_s)
runGeogridOptical(metadata_m, metadata_s, inps.demfile, inps.dhdxfile, inps.dhdyfile, inps.vxfile, inps.vyfile, inps.srxfile, inps.sryfile, inps.csminxfile, inps.csminyfile, inps.csmaxxfile, inps.csmaxyfile, inps.ssmfile)
else:
- metadata_m = loadMetadata(inps.indir_m)
- metadata_s = loadMetadata(inps.indir_s)
+ if inps.parse:
+ metadata_m = loadParsedata(inps.indir_m,inps.buffer)
+ metadata_s = loadParsedata(inps.indir_s,inps.buffer)
+ else:
+ metadata_m = loadMetadata(inps.indir_m,inps.buffer)
+ metadata_s = loadMetadata(inps.indir_s,inps.buffer)
runGeogrid(metadata_m, metadata_s, inps.demfile, inps.dhdxfile, inps.dhdyfile, inps.vxfile, inps.vyfile, inps.srxfile, inps.sryfile, inps.csminxfile, inps.csminyfile, inps.csmaxxfile, inps.csmaxyfile, inps.ssmfile)
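The intent of the `buffer` parameter in the patch above is to widen the radar swath by `buffer` pixels at each range edge. A worked sketch of that arithmetic with illustrative numbers (not from a real scene):

```python
import numpy as np

range_pixel_size = 2.33      # meters, illustrative
starting_range = 800_000.0   # meters, illustrative
far_range = 880_000.0
buffer = 50                  # pixels to add at each range edge

n_samples = int(np.round((far_range - starting_range) / range_pixel_size)) + 1

# Buffering pushes the near edge in and the far edge out by buffer pixels each...
starting_range -= buffer * range_pixel_size
far_range += buffer * range_pixel_size

# ...so the recomputed sample count grows by exactly 2 * buffer.
n_buffered = int(np.round((far_range - starting_range) / range_pixel_size)) + 1
assert n_buffered == n_samples + 2 * buffer
```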


4 changes: 4 additions & 0 deletions hyp3_autorift/vend/README.md
@@ -60,3 +60,7 @@ We've replaced it with `hyp3_autorift.io.get_topsinsar_config`.
    to fix the `noDataMask` used for the search range and %-valid pixel calculations. Unfortunately, this bug exists
    upstream but this fix is dependent on changes in (4) which are not easily applied upstream. Therefore, these changes
    are *not* expected to be applied upstream to `nasa-jpl/autoRIFT` without a significant refactor upstream.
+8. The changes listed in `CHANGES-UPSTREAM-78.diff` were applied from upstream ([nasa-jpl/autorift#78](https://github.com/nasa-jpl/autorift/pull/78))
+   in [ASFHyP3/hyp3-autorift#218](https://github.com/ASFHyP3/hyp3-autorift/pull/218) to run a Geogrid-only workflow to
+   create the GeoTIFFs necessary for correcting the scale-projection issue in polar-stereographic products generated
+   from Sentinel-1 pairs that were created using HyP3 autoRIFT versions < 0.9.0, which was released November 2, 2022.