Showing 2 changed files with 175 additions and 0 deletions.
@@ -0,0 +1,82 @@
import argparse
import json
import os

import pandas as pd
import pystac_client
import stackstac

def get_parser():
    parser = argparse.ArgumentParser(description="Search for Sentinel-2 images")
    parser.add_argument("cloud_cover", type=int, help="maximum percent cloud cover allowed in images (0-100)")
    parser.add_argument("start_month", type=int, help="first month of year to search for images")
    parser.add_argument("stop_month", type=int, help="last month of year to search for images")
    parser.add_argument("npairs", type=int, help="number of pairs per image")
    return parser

def main():
    parser = get_parser()
    args = parser.parse_args()

    # GDAL environment variables for better performance
    # os.environ['AWS_REGION'] = 'us-west-2'
    # os.environ['GDAL_DISABLE_READDIR_ON_OPEN'] = 'EMPTY_DIR'
    # os.environ['AWS_NO_SIGN_REQUEST'] = 'YES'

    # hardcoded area of interest (GeoJSON polygon) for now
    bbox = {
        "type": "Polygon",
        "coordinates": [
            [[75.42382800808971, 36.41082887114753],
             [75.19442677164156, 36.41082887114753],
             [75.19442677164156, 36.201076360872946],
             [75.42382800808971, 36.201076360872946],
             [75.42382800808971, 36.41082887114753]]]
    }

    # Use the Element 84 Earth Search API to query the data
    URL = "https://earth-search.aws.element84.com/v1"
    catalog = pystac_client.Client.open(URL)

    search = catalog.search(
        collections=["sentinel-2-l2a"],
        intersects=bbox,
        query={"eo:cloud_cover": {"lt": args.cloud_cover}}
    )

    # Check how many items were returned
    items = search.item_collection()
    print(f"Returned {len(items)} Items")

    # create xarray dataset without loading data
    sentinel2_stack = stackstac.stack(items)
    # filter to specified month range
    sentinel2_stack_snowoff = sentinel2_stack.where(
        (sentinel2_stack.time.dt.month >= args.start_month)
        & (sentinel2_stack.time.dt.month <= args.stop_month),
        drop=True)

    # select first image of each month
    period_index = pd.PeriodIndex(sentinel2_stack_snowoff['time'].values, freq='M')
    sentinel2_stack_snowoff.coords['year_month'] = ('time', period_index)
    first_images = sentinel2_stack_snowoff.groupby('year_month').map(lambda x: x.isel(time=0))

    product_names = first_images['s2:product_uri'].values.tolist()
    print('\n'.join(product_names))

    # Create Matrix Job Mapping (JSON Array)
    pairs = []
    for r in range(len(product_names) - args.npairs):
        for s in range(1, args.npairs + 1):
            img1_product_name = product_names[r]
            img2_product_name = product_names[r + s]
            shortname = f'{img1_product_name[11:19]}_{img2_product_name[11:19]}'
            pairs.append({'img1_product_name': img1_product_name,
                          'img2_product_name': img2_product_name,
                          'name': shortname})
    matrixJSON = f'{{"include":{json.dumps(pairs)}}}'
    print(f'number of image pairs: {len(pairs)}')

    with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
        print(f'IMAGE_DATES={product_names}', file=f)
        print(f'MATRIX_PARAMS_COMBINATIONS={matrixJSON}', file=f)


if __name__ == "__main__":
    main()
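To illustrate the pairing scheme, here is a minimal sketch with three hypothetical product names and npairs=2 (the names and dates are made up; the [11:19] slice extracts the acquisition date from a standard Sentinel-2 product name):

product_names = [
    "S2B_MSIL2A_20200601T054639_N0214_R048_T43SFV_20200601T082345.SAFE",
    "S2A_MSIL2A_20200706T054641_N0214_R048_T43SFV_20200706T082312.SAFE",
    "S2B_MSIL2A_20200810T054639_N0214_R048_T43SFV_20200810T082301.SAFE",
]
npairs = 2
pairs = []
for r in range(len(product_names) - npairs):
    for s in range(1, npairs + 1):
        img1, img2 = product_names[r], product_names[r + s]
        pairs.append(f"{img1[11:19]}_{img2[11:19]}")  # hypothetical pair names
print(pairs)  # ['20200601_20200706', '20200601_20200810']

The real script additionally stores both full product names per pair, and matrixJSON wraps the list as {"include": [...]} so a GitHub Actions workflow can fan out one job per image pair.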
@@ -0,0 +1,93 @@
#!/usr/bin/env python

import xarray as xr
import rioxarray
from glob import glob
from datetime import datetime
import seaborn as sns
import matplotlib.pyplot as plt

# function to load a GeoTIFF into an xarray Dataset
def xr_read_geotif(geotif_file_path, masked=True):
    da = rioxarray.open_rasterio(geotif_file_path, masked=masked)
    # Extract bands and assign as variables in xr.Dataset()
    ds = xr.Dataset()
    for i, v in enumerate(da.band):
        da_tmp = da.sel(band=v)
        da_tmp.name = "band" + str(i + 1)
        ds[da_tmp.name] = da_tmp
    # Delete empty band coordinates.
    del ds.coords["band"]

    return ds

def combine_ds(data_dir, file_type='horizontal_velocity'):
    datasets = []
    tif_list = glob(f'{data_dir}/S2*{file_type}.tif')
    for tif_path in tif_list:
        dates = tif_path.split('/')[-1][3:20]  # parse filename for dates
        start_date = datetime.strptime(dates[:8], '%Y%m%d')
        end_date = datetime.strptime(dates[-8:], '%Y%m%d')
        t_baseline = end_date - start_date

        src = xr_read_geotif(tif_path, masked=False)  # read product to xarray ds
        src = src.assign_coords({"dates": dates})
        src = src.expand_dims("dates")

        src = src.assign_coords(start_date=('dates', [start_date]))
        src = src.assign_coords(end_date=('dates', [end_date]))
        src = src.assign_coords(t_baseline=('dates', [t_baseline]))

        src = src.rename({'band1': file_type})

        datasets.append(src)

    ds = xr.concat(datasets, dim="dates", combine_attrs="no_conflicts")  # create dataset
    ds = ds.sortby('dates')

    return ds

def main():
    # read in tifs
    veloc_ds = combine_ds(data_dir='glacier_image_correlation', file_type='horizontal_velocity')
    # calculate median velocity (uncomment to save as GeoTIFF)
    veloc_da_median = veloc_ds.horizontal_velocity.median(dim='dates')
    # veloc_da_median.rio.to_raster('glacier_image_correlation/median_horizontal_velocity.tif')
    # calculate standard deviation of velocity
    veloc_da_stdev = veloc_ds.horizontal_velocity.std(dim='dates')
    # veloc_da_stdev.rio.to_raster('glacier_image_correlation/stdev_horizontal_velocity.tif')
    # count valid velocity pixels
    veloc_da_count = veloc_ds.horizontal_velocity.count(dim='dates')
    # veloc_da_count.rio.to_raster('glacier_image_correlation/count_horizontal_velocity.tif')

    # plot summary statistics
    sns.set_theme()
    f, ax = plt.subplots(1, 3, figsize=(15, 5), sharex=True, sharey=True)
    veloc_da_median.plot(ax=ax[0], vmin=0, vmax=600, cmap='inferno',
                         cbar_kwargs={'shrink': 0.7, 'label': 'velocity (m/yr)'})
    veloc_da_stdev.plot(ax=ax[1], vmin=0, vmax=300, cmap='cividis',
                        cbar_kwargs={'shrink': 0.7, 'label': 'standard deviation (m/yr)'})
    veloc_da_count.plot(ax=ax[2], vmin=0, cmap='Blues',
                        cbar_kwargs={'shrink': 0.7, 'label': 'pixel count'})
    titles = ['median velocity', 'velocity standard deviation', 'valid pixel count']
    for axis, title in zip(ax, titles):
        axis.set_aspect('equal')
        axis.set_title(title)
        axis.set_xticks([])
        axis.set_yticks([])
        axis.set_xlabel('')
        axis.set_ylabel('')
    f.tight_layout()

    f.savefig('glacier_image_correlation/velocity_summary_statistics.png', dpi=300)


if __name__ == "__main__":
    main()
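combine_ds assumes the correlation products follow a fixed naming convention with both acquisition dates embedded in the filename. A quick sketch of the slicing it relies on (the filename below is hypothetical):

from datetime import datetime

# hypothetical filename: "S2_<start>_<end>_horizontal_velocity.tif"
fname = "S2_20200601_20200713_horizontal_velocity.tif"
dates = fname[3:20]                                  # '20200601_20200713'
start_date = datetime.strptime(dates[:8], '%Y%m%d')  # 2020-06-01
end_date = datetime.strptime(dates[-8:], '%Y%m%d')   # 2020-07-13
print(end_date - start_date)                         # 42 days, 0:00:00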