diff --git a/CHANGELOG.md b/CHANGELOG.md
index 145b0cb76..319ceffe5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,10 @@
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
 and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## Latest Changes
+### Fixed
+* [620](https://github.com/dbekaert/RAiDER/issues/620) - Fix MERRA-2 access because the API was updated
+
 ## [0.4.7]
 ### Fixed
diff --git a/test/__init__.py b/test/__init__.py
index 113a8326c..0faf8b712 100644
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -16,7 +16,7 @@
 WM_DIR = os.path.join(TEST_DIR, 'weather_files')
 ORB_DIR = os.path.join(TEST_DIR, 'orbit_files')
 
-WM = 'GMAO'
+WM = 'MERRA2'
 
 @contextmanager
 def pushd(dir):
diff --git a/test/test_GUNW.py b/test/test_GUNW.py
index a57152d2f..bf62a0152 100644
--- a/test/test_GUNW.py
+++ b/test/test_GUNW.py
@@ -277,7 +277,7 @@ def test_azimuth_timing_interp_against_center_time_interp(weather_model_name: st
     assert np.nanmax(abs_diff_mm) < 1
 
 
-@pytest.mark.parametrize('weather_model_name', ['GMAO', 'HRRR', 'HRES', 'ERA5', 'ERA5'])
+@pytest.mark.parametrize('weather_model_name', ['HRRR', 'HRES', 'ERA5', 'ERA5T'])
 def test_check_weather_model_availability(test_gunw_path_factory, weather_model_name, mocker):
     # Should be True for all weather models
     # S1-GUNW-D-R-071-tops-20200130_20200124-135156-34956N_32979N-PP-913f-v2_0_4.nc
@@ -288,12 +288,12 @@ def test_check_weather_model_availability(test_gunw_path_factory, weather_model_
     mocker.patch("RAiDER.aria.prepFromGUNW.get_acq_time_from_slc_id",
                  side_effect=[pd.Timestamp('2015-01-01'), pd.Timestamp('2014-01-01')])
     cond = check_weather_model_availability(test_gunw_path, weather_model_name)
-    if weather_model_name in ['HRRR', 'GMAO']:
+    if weather_model_name in ['HRRR', 'MERRA2']:
         cond = not cond
     assert cond
 
 
-@pytest.mark.parametrize('weather_model_name', ['GMAO', 'HRRR'])
+@pytest.mark.parametrize('weather_model_name', ['HRRR'])
 def test_check_weather_model_availability_over_alaska(test_gunw_path_factory, weather_model_name, mocker):
     # Should be True for all weather models
     # S1-GUNW-D-R-059-tops-20230320_20220418-180300-00179W_00051N-PP-c92e-v2_0_6.nc
@@ -309,7 +309,7 @@ def test_check_weather_model_availability_over_alaska(test_gunw_path_factory, we
     assert cond
 
 
-@pytest.mark.parametrize('weather_model_name', ['HRRR', 'GMAO'])
+@pytest.mark.parametrize('weather_model_name', ['HRRR'])
 @pytest.mark.parametrize('location', ['california-t71', 'alaska'])
 def test_weather_model_availability_integration_using_valid_range(location,
                                                                    test_gunw_path_factory,
diff --git a/tools/RAiDER/aria/prepFromGUNW.py b/tools/RAiDER/aria/prepFromGUNW.py
index 4561abe4f..d37728be6 100644
--- a/tools/RAiDER/aria/prepFromGUNW.py
+++ b/tools/RAiDER/aria/prepFromGUNW.py
@@ -26,7 +26,7 @@
 from RAiDER.s1_orbits import ensure_orbit_credentials
 
 ## cube spacing in degrees for each model
-DCT_POSTING = {'HRRR': 0.05, 'HRES': 0.10, 'GMAO': 0.10, 'ERA5': 0.10, 'ERA5T': 0.10}
+DCT_POSTING = {'HRRR': 0.05, 'HRES': 0.10, 'GMAO': 0.10, 'ERA5': 0.10, 'ERA5T': 0.10, 'MERRA2': 0.1}
 
 
 def _get_acq_time_from_gunw_id(gunw_id: str, reference_or_secondary: str) -> datetime:
@@ -95,7 +95,7 @@ def check_weather_model_availability(gunw_path: str,
     ----------
     gunw_path : str
     weather_model_name : str
-        Should be one of 'HRRR', 'HRES', 'ERA5', 'ERA5T', 'GMAO'.
+        Should be one of 'HRRR', 'HRES', 'ERA5', 'ERA5T', 'GMAO', 'MERRA2'.
     Returns
     -------
     bool:
diff --git a/tools/RAiDER/models/__init__.py b/tools/RAiDER/models/__init__.py
index 1d9307bda..79666992e 100644
--- a/tools/RAiDER/models/__init__.py
+++ b/tools/RAiDER/models/__init__.py
@@ -3,5 +3,7 @@
 from .gmao import GMAO
 from .hres import HRES
 from .hrrr import HRRR, HRRRAK
+from .merra2 import MERRA2
+from .ncmr import NCMR
 
-__all__ = ['HRRR', 'HRRRAK', 'GMAO', 'ERA5', 'ERA5T', 'HRES']
+__all__ = ['HRRR', 'HRRRAK', 'GMAO', 'ERA5', 'ERA5T', 'HRES', 'MERRA2']
diff --git a/tools/RAiDER/models/allowed.py b/tools/RAiDER/models/allowed.py
index e5be23652..9d1b1d1ea 100755
--- a/tools/RAiDER/models/allowed.py
+++ b/tools/RAiDER/models/allowed.py
@@ -3,5 +3,7 @@
     'ERA5T',
     'HRRR',
     'GMAO',
-    'HRES'
+    'HRES',
+    'MERRA2',
+    'NCMR',
 ]
diff --git a/tools/RAiDER/models/gmao.py b/tools/RAiDER/models/gmao.py
index 04ab1f507..bf23ec58c 100755
--- a/tools/RAiDER/models/gmao.py
+++ b/tools/RAiDER/models/gmao.py
@@ -9,7 +9,7 @@
 
 from RAiDER.models.weatherModel import WeatherModel, TIME_RES
 from RAiDER.logger import logger
-from RAiDER.utilFcns import writeWeatherVars2NETCDF4, round_date, requests_retry_session
+from RAiDER.utilFcns import writeWeatherVarsXarray, round_date, requests_retry_session
 from RAiDER.models.model_levels import (
     LEVELS_137_HEIGHTS,
 )
@@ -145,7 +145,7 @@ def _fetch(self, out):
 
         try:
             # Note that lat/lon gets written twice for GMAO because they are the same as y/x
-            writeWeatherVars2NETCDF4(self, lats, lons, h, qv, p, t, outName=out)
+            writeWeatherVarsXarray(lats, lons, h, qv, p, t, self._time, self._proj, outName=out)
         except Exception:
             logger.exception("Unable to save weathermodel to file")
 
diff --git a/tools/RAiDER/models/merra2.py b/tools/RAiDER/models/merra2.py
index 85a3a7fc4..e4aec16dc 100755
--- a/tools/RAiDER/models/merra2.py
+++ b/tools/RAiDER/models/merra2.py
@@ -1,4 +1,6 @@
 import os
+import xarray
+
 import datetime as dt
 import numpy as np
 import pydap.cas.urs
@@ -8,7 +10,7 @@
 
 from RAiDER.models.weatherModel import WeatherModel
 from RAiDER.logger import logger
-from RAiDER.utilFcns import writeWeatherVars2NETCDF4, read_EarthData_loginInfo
+from RAiDER.utilFcns import writeWeatherVarsXarray, read_EarthData_loginInfo
 from RAiDER.models.model_levels import (
     LEVELS_137_HEIGHTS,
 )
@@ -94,6 +96,8 @@ def _fetch(self, out):
             self._lon_res
         )
 
+        lon, lat = np.meshgrid(lons, lats)
+
        if time.year < 1992:
             url_sub = 100
         elif time.year < 2001:
@@ -107,39 +111,22 @@ def _fetch(self, out):
         DT = time - T0
         time_ind = int(DT.total_seconds() / 3600.0 / 3.0)
 
-        ml_min = 0
-        ml_max = 71
-
         # Earthdata credentials
         earthdata_usr, earthdata_pwd = read_EarthData_loginInfo(EARTHDATA_RC)
 
         # open the dataset and pull the data
-        url = 'https://goldsmr5.gesdisc.eosdis.nasa.gov:443/opendap/MERRA2/M2I3NVASM.5.12.4/' + time.strftime('%Y/%m') + '/MERRA2_' + str(url_sub) + '.inst3_3d_asm_Nv.' + time.strftime('%Y%m%d') + '.nc4'
+        url = 'https://goldsmr5.gesdisc.eosdis.nasa.gov/opendap/MERRA2/M2T3NVASM.5.12.4/' + time.strftime('%Y/%m') + '/MERRA2_' + str(url_sub) + '.tavg3_3d_asm_Nv.' + time.strftime('%Y%m%d') + '.nc4'
+        session = pydap.cas.urs.setup_session(earthdata_usr, earthdata_pwd, check_url=url)
 
-        ds = pydap.client.open_url(url, session=session)
+        stream = pydap.client.open_url(url, session=session)
 
-        ############# The MERRA-2 server changes the pydap data retrieval format frequently between these two formats; so better to retain both of them rather than only using either one of them #############
-        try:
-            q = ds['QV'].data[0][time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-            p = ds['PL'].data[0][time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-            t = ds['T'].data[0][time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-            h = ds['H'].data[0][time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-        except IndexError:
-            q = ds['QV'].data[time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-            p = ds['PL'].data[time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-            t = ds['T'].data[time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-            h = ds['H'].data[time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-        except AttributeError:
-            q = ds['QV'][time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-            p = ds['PL'][time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-            t = ds['T'][time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-            h = ds['H'][time_ind, ml_min:(ml_max + 1), lat_min_ind:(lat_max_ind + 1), lon_min_ind:(lon_max_ind + 1)][0]
-        except BaseException:
-            logger.exception("MERRA-2: Unable to read weathermodel data")
-        ########################################################################################################################
+        q = stream['QV'][0, :, lat_min_ind:lat_max_ind + 1, lon_min_ind:lon_max_ind + 1].data.squeeze()
+        p = stream['PL'][0, :, lat_min_ind:lat_max_ind + 1, lon_min_ind:lon_max_ind + 1].data.squeeze()
+        t = stream['T'][0, :, lat_min_ind:lat_max_ind + 1, lon_min_ind:lon_max_ind + 1].data.squeeze()
+        h = stream['H'][0, :, lat_min_ind:lat_max_ind + 1, lon_min_ind:lon_max_ind + 1].data.squeeze()
 
         try:
-            writeWeatherVars2NETCDF4(self, lats, lons, h, q, p, t, outName=out)
+            writeWeatherVarsXarray(lat, lon, h, q, p, t, time, self._proj, outName=out)
         except Exception as e:
             logger.debug(e)
             logger.exception("MERRA-2: Unable to save weathermodel to file")
@@ -161,26 +148,19 @@ def _load_model_level(self, filename):
         '''
 
         # adding the import here should become absolute when transition to netcdf
-        from netCDF4 import Dataset
-        with Dataset(filename, mode='r') as f:
-            lons = np.array(f.variables['x'][:])
-            lats = np.array(f.variables['y'][:])
-            h = np.array(f.variables['H'][:])
-            q = np.array(f.variables['QV'][:])
-            p = np.array(f.variables['PL'][:])
-            t = np.array(f.variables['T'][:])
-
-            # restructure the 3-D lat/lon/h in regular grid
-            _lons = np.broadcast_to(lons[np.newaxis, np.newaxis, :], t.shape)
-            _lats = np.broadcast_to(lats[np.newaxis, :, np.newaxis], t.shape)
+        ds = xarray.load_dataset(filename)
+        lons = ds['longitude'].values
+        lats = ds['latitude'].values
+        h = ds['h'].values
+        q = ds['q'].values
+        p = ds['p'].values
+        t = ds['t'].values
 
         # Re-structure everything from (heights, lats, lons) to (lons, lats, heights)
         p = np.transpose(p)
         q = np.transpose(q)
         t = np.transpose(t)
         h = np.transpose(h)
-        _lats = np.transpose(_lats)
-        _lons = np.transpose(_lons)
 
         # check this
         # data cube format should be lats,lons,heights
@@ -188,24 +168,19 @@ def _load_model_level(self, filename):
         q = q.swapaxes(0, 1)
         t = t.swapaxes(0, 1)
         h = h.swapaxes(0, 1)
-        _lats = _lats.swapaxes(0, 1)
-        _lons = _lons.swapaxes(0, 1)
 
         # For some reason z is opposite the others
         p = np.flip(p, axis=2)
         q = np.flip(q, axis=2)
         t = np.flip(t, axis=2)
         h = np.flip(h, axis=2)
-        _lats = np.flip(_lats, axis=2)
-        _lons = np.flip(_lons, axis=2)
 
         # assign the regular-grid (lat/lon/h) variables
-
         self._p = p
         self._q = q
         self._t = t
-        self._lats = _lats
-        self._lons = _lons
-        self._xs = _lons
-        self._ys = _lats
+        self._lats = lats
+        self._lons = lons
+        self._xs = lons
+        self._ys = lats
         self._zs = h
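For orientation, the OPeNDAP access pattern that the updated MERRA-2 _fetch relies on can be sketched as below. This is an illustrative sketch only: the date, the MERRA2_400 stream number, the index bounds, and the Earthdata credentials are placeholders rather than values taken from the patch; only the URL layout and the slicing mirror the new code.

    import pydap.cas.urs
    import pydap.client

    # Placeholder Earthdata Login credentials (RAiDER itself reads them via read_EarthData_loginInfo)
    username, password = 'my_earthdata_user', 'my_earthdata_password'

    # The updated collection: 3-hourly time-averaged model-level fields (M2T3NVASM / tavg3_3d_asm_Nv)
    url = ('https://goldsmr5.gesdisc.eosdis.nasa.gov/opendap/MERRA2/M2T3NVASM.5.12.4/'
           '2020/01/MERRA2_400.tavg3_3d_asm_Nv.20200103.nc4')

    session = pydap.cas.urs.setup_session(username, password, check_url=url)
    stream = pydap.client.open_url(url, session=session)

    # Hypothetical index bounds for a small area of interest
    lat_min_ind, lat_max_ind = 200, 220
    lon_min_ind, lon_max_ind = 300, 330

    # All model levels at the first time step in the file, cropped to the area of interest
    q = stream['QV'][0, :, lat_min_ind:lat_max_ind + 1, lon_min_ind:lon_max_ind + 1].data.squeeze()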
diff --git a/tools/RAiDER/models/ncmr.py b/tools/RAiDER/models/ncmr.py
index 4f64984aa..01b9f62ad 100755
--- a/tools/RAiDER/models/ncmr.py
+++ b/tools/RAiDER/models/ncmr.py
@@ -13,9 +13,9 @@ from RAiDER.models.weatherModel import WeatherModel, TIME_RES
 from RAiDER.logger import logger
 from RAiDER.utilFcns import (
-    writeWeatherVars2NETCDF4,
     read_NCMR_loginInfo,
-    show_progress
+    show_progress,
+    writeWeatherVarsXarray,
 )
 from RAiDER.models.model_levels import (
     LEVELS_137_HEIGHTS,
 )
@@ -174,7 +174,7 @@ def _download_ncmr_file(self, out, date_time, bounding_box):
         ########################################################################################################################
 
         try:
-            writeWeatherVars2NETCDF4(self, lats, lons, hgt, q, p, t, outName=out)
+            writeWeatherVarsXarray(lats, lons, hgt, q, p, t, self._time, self._proj, outName=out)
         except Exception:
             logger.exception("Unable to save weathermodel to file")
 
diff --git a/tools/RAiDER/models/weatherModel.py b/tools/RAiDER/models/weatherModel.py
index 37165f07f..868ed5632 100755
--- a/tools/RAiDER/models/weatherModel.py
+++ b/tools/RAiDER/models/weatherModel.py
@@ -20,7 +20,7 @@
 from RAiDER.models import plotWeather as plots, weatherModel
 from RAiDER.models.customExceptions import *
 from RAiDER.utilFcns import (
-    robmax, robmin, write2NETCDF4core, calcgeoh, transform_coords, clip_bbox
+    robmax, robmin, writeWeatherVarsXarray, calcgeoh, transform_coords, clip_bbox
 )
 
 TIME_RES = {'GMAO': 3,
diff --git a/tools/RAiDER/utilFcns.py b/tools/RAiDER/utilFcns.py
index dce21673b..a8a7507cc 100755
--- a/tools/RAiDER/utilFcns.py
+++ b/tools/RAiDER/utilFcns.py
@@ -1,6 +1,7 @@
 """Geodesy-related utility functions."""
 import os
 import re
+import xarray
 
 from datetime import datetime, timedelta
 from numpy import ndarray
@@ -622,199 +623,54 @@ def requests_retry_session(retries=10, session=None):
     return session
 
 
-def writeWeatherVars2NETCDF4(self, lat, lon, h, q, p, t, outName=None, NoDataValue=None, chunk=(1, 91, 144), mapping_name='WGS84'):
-    '''
-    By calling the abstract/modular netcdf writer (RAiDER.utilFcns.write2NETCDF4core), write the OpenDAP/PyDAP-retrieved weather model data (GMAO and MERRA-2) to a NETCDF4 file
-    that can be accessed by external programs.
-
-    The point of doing this is to alleviate some of the memory load of keeping
-    the full model in memory and make it easier to scale up the program.
-    '''
-
-    import netCDF4
-
-    if outName is None:
-        outName = os.path.join(
-            os.getcwd() + '/weather_files',
-            self._Name + datetime.strftime(
-                self._time, '_%Y_%m_%d_T%H_%M_%S'
-            ) + '.nc'
-        )
-
-    if NoDataValue is None:
-        NoDataValue = -9999.
-
-    self._time = getTimeFromFile(outName)
-
-    dimidZ, dimidY, dimidX = t.shape
-    chunk_lines_Y = np.min([chunk[1], dimidY])
-    chunk_lines_X = np.min([chunk[2], dimidX])
-    ChunkSize = [1, chunk_lines_Y, chunk_lines_X]
-
-    nc_outfile = netCDF4.Dataset(outName, 'w', clobber=True, format='NETCDF4')
-    nc_outfile.setncattr('Conventions', 'CF-1.6')
-    nc_outfile.setncattr('datetime', datetime.strftime(self._time, "%Y_%m_%dT%H_%M_%S"))
-    nc_outfile.setncattr('date_created', datetime.now().strftime("%Y_%m_%dT%H_%M_%S"))
-    title = self._Name + ' weather model data'
-    nc_outfile.setncattr('title', title)
-
-    tran = [lon[0], lon[1] - lon[0], 0.0, lat[0], 0.0, lat[1] - lat[0]]
-
+def writeWeatherVarsXarray(lat, lon, h, q, p, t, dt, crs, outName=None, NoDataValue=-9999, chunk=(1, 91, 144)):
+
+    # The model datetime (dt) is passed in so the output attributes record it alongside the file creation time
+    attrs_dict = {
+        'datetime': dt.strftime("%Y_%m_%dT%H_%M_%S"),
+        'date_created': datetime.now().strftime("%Y_%m_%dT%H_%M_%S"),
+        'NoDataValue': NoDataValue,
+        'chunksize': chunk,
+        # 'mapping_name': mapping_name,
+    }
+
     dimension_dict = {
-        'x': {'varname': 'x',
-              'datatype': np.dtype('float64'),
-              'dimensions': ('x'),
-              'length': dimidX,
-              'FillValue': None,
-              'standard_name': 'longitude',
-              'description': 'longitude',
-              'dataset': lon,
-              'units': 'degrees_east'},
-        'y': {'varname': 'y',
-              'datatype': np.dtype('float64'),
-              'dimensions': ('y'),
-              'length': dimidY,
-              'FillValue': None,
-              'standard_name': 'latitude',
-              'description': 'latitude',
-              'dataset': lat,
-              'units': 'degrees_north'},
-        'z': {'varname': 'z',
-              'datatype': np.dtype('float32'),
-              'dimensions': ('z'),
-              'length': dimidZ,
-              'FillValue': None,
-              'standard_name': 'model_layers',
-              'description': 'model layers',
-              'dataset': np.arange(dimidZ),
-              'units': 'layer'}
+        'latitude': (('y', 'x'), lat),
+        'longitude': (('y', 'x'), lon),
     }
 
     dataset_dict = {
-        'h': {'varname': 'H',
-              'datatype': np.dtype('float32'),
-              'dimensions': ('z', 'y', 'x'),
-              'grid_mapping': mapping_name,
-              'FillValue': NoDataValue,
-              'ChunkSize': ChunkSize,
-              'standard_name': 'mid_layer_heights',
-              'description': 'mid layer heights',
-              'dataset': h,
-              'units': 'm'},
-        'q': {'varname': 'QV',
-              'datatype': np.dtype('float32'),
-              'dimensions': ('z', 'y', 'x'),
-              'grid_mapping': mapping_name,
-              'FillValue': NoDataValue,
-              'ChunkSize': ChunkSize,
-              'standard_name': 'specific_humidity',
-              'description': 'specific humidity',
-              'dataset': q,
-              'units': 'kg kg-1'},
-        'p': {'varname': 'PL',
-              'datatype': np.dtype('float32'),
-              'dimensions': ('z', 'y', 'x'),
-              'grid_mapping': mapping_name,
-              'FillValue': NoDataValue,
-              'ChunkSize': ChunkSize,
-              'standard_name': 'mid_level_pressure',
-              'description': 'mid level pressure',
-              'dataset': p,
-              'units': 'Pa'},
-        't': {'varname': 'T',
-              'datatype': np.dtype('float32'),
-              'dimensions': ('z', 'y', 'x'),
-              'grid_mapping': mapping_name,
-              'FillValue': NoDataValue,
-              'ChunkSize': ChunkSize,
-              'standard_name': 'air_temperature',
-              'description': 'air temperature',
-              'dataset': t,
-              'units': 'K'}
+        'h': (('z', 'y', 'x'), h),
+        'q': (('z', 'y', 'x'), q),
+        'p': (('z', 'y', 'x'), p),
+        't': (('z', 'y', 'x'), t),
     }
 
-    nc_outfile = write2NETCDF4core(nc_outfile, dimension_dict, dataset_dict, tran, mapping_name='WGS84')
-
-    nc_outfile.sync() # flush data to disk
-    nc_outfile.close()
-
-
-def write2NETCDF4core(nc_outfile, dimension_dict, dataset_dict, tran, mapping_name='WGS84'):
-    '''
-    The abstract/modular netcdf writer that can be called by a wrapper function to write data to a NETCDF4 file
-    that can be accessed by external programs.
-
-    The point of doing this is to alleviate some of the memory load of keeping
-    the full model in memory and make it easier to scale up the program.
-    '''
-    from osgeo import osr
-
-    if mapping_name == 'WGS84':
-        epsg = 4326
-        crs = CRS.from_epsg(epsg)
-
-        grid_mapping = 'WGS84' # need to set this as an attribute for the image variables
-    else:
-        crs = CRS.from_wkt(mapping_name)
-        grid_mapping = 'CRS'
-
-    datatype = np.dtype('S1')
-    dimensions = ()
-    var = nc_outfile.createVariable(
-        grid_mapping,
-        datatype,
-        dimensions,
-        fill_value=None
-    )
-
-    # variable made, now add attributes
-    for k, v in crs.to_cf().items():
-        var.setncattr(k, v)
-
-    var.setncattr('GeoTransform', ' '.join(str(x) for x in tran)) # note this has pixel size in it - set explicitly above
-
-    for dim in dimension_dict:
-        nc_outfile.createDimension(dim, dimension_dict[dim]['length'])
-        varname = dimension_dict[dim]['varname']
-        datatype = dimension_dict[dim]['datatype']
-        dimensions = dimension_dict[dim]['dimensions']
-        FillValue = dimension_dict[dim]['FillValue']
-        var = nc_outfile.createVariable(varname, datatype, dimensions, fill_value=FillValue)
-        var.setncattr('standard_name', dimension_dict[dim]['standard_name'])
-        var.setncattr('description', dimension_dict[dim]['description'])
-        var.setncattr('units', dimension_dict[dim]['units'])
-        var[:] = dimension_dict[dim]['dataset'].astype(dimension_dict[dim]['datatype'])
-
-    for data in dataset_dict:
-        varname = dataset_dict[data]['varname']
-        datatype = dataset_dict[data]['datatype']
-        dimensions = dataset_dict[data]['dimensions']
-        FillValue = dataset_dict[data]['FillValue']
-        ChunkSize = dataset_dict[data]['ChunkSize']
-        var = nc_outfile.createVariable(
-            varname,
-            datatype,
-            dimensions,
-            fill_value=FillValue,
-            zlib=True,
-            complevel=2,
-            shuffle=True,
-            chunksizes=ChunkSize
+    ds = xarray.Dataset(
+            data_vars=dataset_dict,
+            coords=dimension_dict,
+            attrs=attrs_dict,
         )
-        # Override with correct name here
-        var.setncattr('grid_mapping', grid_mapping) # dataset_dict[data]['grid_mapping'])
-        var.setncattr('standard_name', dataset_dict[data]['standard_name'])
-        var.setncattr('description', dataset_dict[data]['description'])
-        if 'units' in dataset_dict[data]:
-            var.setncattr('units', dataset_dict[data]['units'])
-
-        ndmask = np.isnan(dataset_dict[data]['dataset'])
-        dataset_dict[data]['dataset'][ndmask] = FillValue
-
-        var[:] = dataset_dict[data]['dataset'].astype(datatype)
-
-    return nc_outfile
+
+    ds['h'].attrs['standard_name'] = 'mid_layer_heights'
+    ds['p'].attrs['standard_name'] = 'mid_level_pressure'
+    ds['q'].attrs['standard_name'] = 'specific_humidity'
+    ds['t'].attrs['standard_name'] = 'air_temperature'
+
+    ds['h'].attrs['units'] = 'm'
+    ds['p'].attrs['units'] = 'Pa'
+    ds['q'].attrs['units'] = 'kg kg-1'
+    ds['t'].attrs['units'] = 'K'
+
+    ds["proj"] = int()
+    for k, v in crs.to_cf().items():
+        ds.proj.attrs[k] = v
+    for var in ds.data_vars:
+        ds[var].attrs['grid_mapping'] = 'proj'
+
+    ds.to_netcdf(outName)
+    del ds
+
 
 def convertLons(inLons):
     '''Convert lons from 0-360 to -180-180'''
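As a usage note, the new writeWeatherVarsXarray helper takes 2-D latitude/longitude grids, 3-D (z, y, x) data cubes, a model datetime, and a CRS whose to_cf() method supplies the grid-mapping attributes (a pyproj CRS works), and writes a single netCDF file. A minimal sketch, assuming the function as added above; the grid sizes, field values, and output name are made up:

    import datetime
    import numpy as np
    from pyproj import CRS

    from RAiDER.utilFcns import writeWeatherVarsXarray

    nz, ny, nx = 72, 91, 144                # made-up dimensions
    lats = np.linspace(20.0, 30.0, ny)
    lons = np.linspace(-120.0, -110.0, nx)
    lon2d, lat2d = np.meshgrid(lons, lats)  # 2-D grids, as merra2.py builds with np.meshgrid

    h = np.zeros((nz, ny, nx))              # mid-layer heights [m]
    q = np.zeros((nz, ny, nx))              # specific humidity [kg/kg]
    p = np.full((nz, ny, nx), 1.0e5)        # mid-level pressure [Pa]
    t = np.full((nz, ny, nx), 290.0)        # air temperature [K]

    writeWeatherVarsXarray(lat2d, lon2d, h, q, p, t,
                           dt=datetime.datetime(2020, 1, 3, 1, 30),
                           crs=CRS.from_epsg(4326),
                           outName='MERRA2_example.nc')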