Merge pull request #95 from ec-jrc/release/ecmwf_nomerge
Release/ecmwf nomerge
doc78 authored Apr 13, 2022
2 parents 09bdeec + ccfd9da commit 3b7979c
Showing 51 changed files with 2,409 additions and 2,172 deletions.
21 changes: 10 additions & 11 deletions README.md
@@ -15,20 +15,19 @@ Other useful resources
| Lisflood Usecases | | https://github.com/ec-jrc/lisflood-usecases |


## Notes for Release 3.1.0
## Notes for Release 3.2.0

This release includes the following changes:
- a new xarray reader for the water abstraction demand maps;
- a unit test verifying the reader for the water abstraction maps;
- bug fixes and improvements to the caching and chunking functions;
- improved handling of latitude and longitude grids to preserve the full precision of the reference system;
- an improved definition of the output variables.

With version 3.1.0, the way Lisflood handles I/O has been changed.
All of the changes above improve the code's ability to handle inputs and outputs.
The modelling of the hydrological processes has not been changed.

The NetCDF reader for forcings (or any other temporal data) is now based on xarray, allowing more flexibility and efficiency (a dramatic improvement in calibration mode, i.e. without NetCDF outputs).
Two new options are available in the XML settings file (a sketch of both is shown after this list):
• NetCDFTimeChunks: chunking size in the time dimension. The recommended value is "auto", but the chunking size can also be set manually or to "-1" to load the whole time series into memory (very fast but memory-intensive).
• MapsCaching (True or False): option designed for the LISFLOOD calibration. If set to True, all static maps and forcings are stored in a cache so that they do not have to be reloaded by each LISFLOOD instance. This option sets NetCDFTimeChunks to "-1", meaning that the whole time series of the NetCDF inputs is loaded into memory.
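
A minimal, hypothetical fragment of a settings file illustrating the two options; the `<lfuser>` element and the `<textvar>` syntax below follow the usual LISFLOOD settings convention but are assumptions here, not taken from this commit:

```xml
<!-- Hypothetical LISFLOOD settings fragment: only the two option names come
     from this release; the surrounding elements are assumed. -->
<lfuser>
    <textvar name="NetCDFTimeChunks" value="auto"/>  <!-- or a number of time steps, or "-1" -->
    <textvar name="MapsCaching" value="True"/>       <!-- forces NetCDFTimeChunks to "-1" -->
</lfuser>
```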
Several tests have been added (lat/lon domains, inflows, the new reader, etc.). This version also makes it easier to use LISFLOOD as a library and to run multiple instances of LISFLOOD in a thread-safe environment. This can be done as follows:
import lisf1
...
lisf1.main(settings_file, flags)
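
As a purely illustrative sketch (not part of this commit), several instances could then be launched from separate threads; the settings paths are placeholders and `flags_A`/`flags_B` stand for whatever optional flags you would otherwise pass to `lisf1.main`:

```python
# Hypothetical sketch of running two LISFLOOD instances concurrently.
from threading import Thread

import lisf1

flags_A = flags_B = None   # placeholders: replace with the flags expected by lisf1.main
threads = [
    Thread(target=lisf1.main, args=('settings_A.xml', flags_A)),  # placeholder settings files
    Thread(target=lisf1.main, args=('settings_B.xml', flags_B)),
]
for t in threads:
    t.start()
for t in threads:
    t.join()
```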
IMPORTANT NOTE: the results of the unit tests of this release differ from those of release 3.1.1. The differences are due exclusively to a different use of the optional modules (e.g. groundwatersmooth) within the .xml settings file of the unit tests. When using the same .xml settings file and the same dataset, v3.1.1 and v3.2.0 produce the same results.

## Quick start

2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
3.1.1
3.2.0
40 changes: 40 additions & 0 deletions requirements_ecmwf.txt
@@ -0,0 +1,40 @@
attrs==19.3.0
beautifulsoup4==4.8.1
cftime==1.0.4.2
coverage==6.0
Cython==0.29.14
dask==2.7.0
future==0.18.0
GDAL==3.0.4
importlib-metadata==0.23
iniconfig==1.1.1
lisflood-utilities==0.12.18
llvmlite==0.28.0
lxml==4.6.3
more-itertools==7.2.0
netCDF4==1.5.3
nine==1.0.0
numba==0.43.1
numexpr==2.7.0
numpy==1.19.5
packaging==19.2
pandas==0.25.1
pathlib2==2.3.5
pkginfo==1.5.0.1
pluggy==0.13.0
py==1.10.0
pyparsing==2.4.2
pyproj==2.4.0
pytest==6.2.5
pytest-cov==3.0.0
pytest-mock==3.6.1
python-dateutil==2.8.1
pytz==2019.3
PyYAML==5.3
six==1.12.0
soupsieve==1.9.5
toml==0.10.0
tomli==1.2.1
toolz==0.10.0
xarray==0.15.1
zipp==0.6.0
2 changes: 1 addition & 1 deletion setup.py
@@ -168,7 +168,7 @@ def _get_gdal_version():
long_description_content_type='text/markdown',
description='LISFLOOD model python module',
author=__authors__,
author_email='domenico.nappo@ext.ec.europa.eu',
author_email='carlo.russo@ext.ec.europa.eu',
keywords=['lisflood', 'lisvap', 'efas', 'glofas', 'copernicus', 'ecmwf'],
license='EUPL 1.2',
url='https://github.com/ec-jrc/lisflood-code',
4 changes: 2 additions & 2 deletions src/lisflood/Lisflood_initial.py
@@ -75,10 +75,10 @@ def __init__(self):
report_steps = settings.report_steps

if option['readNetcdfStack']:
# get the extent of the maps from the precipitation input maps
# get the extent of the maps from the netCDF template
# and the modelling extent from the MaskMap
# cutmap[] defines the MaskMap inside the precipitation map
_ = CutMap(*mapattrNetCDF(binding['E0Maps'])) # register cutmaps
_ = CutMap(*mapattrNetCDF(binding['netCDFtemplate'])) # register cutmaps
# cutmap[0], cutmap[1], cutmap[2], cutmap[3] = mapattrNetCDF(binding['E0Maps'])
if option['writeNetcdfStack'] or option['writeNetcdf']:
# if NetCDF is written, the pr.nc is read to get the metadata
6 changes: 3 additions & 3 deletions src/lisflood/__init__.py
@@ -10,7 +10,7 @@

__version__ = version
__authors__ = "Ad de Roo, Emiliano Gelati, Peter Burek, Johan van der Knijff, Niko Wanders"
__date__ = "20/01/2021"
__copyright__ = "Copyright 2019-2021, European Commission - Joint Research Centre"
__maintainer__ = "Cinzia Mazzetti, Stefania Grimaldi, Emiliano Gelati, Domenico Nappo, Valerio Lorini, Lorenzo Mentaschi, Ad de Roo"
__date__ = "08/04/2022"
__copyright__ = "Copyright 2019-2022, European Commission - Joint Research Centre"
__maintainer__ = "Stefania Grimaldi, Cinzia Mazzetti, Carlo Russo, Valerio Lorini, Ad de Roo"
__status__ = "Operation"
24 changes: 0 additions & 24 deletions src/lisflood/cache.py

This file was deleted.

30 changes: 21 additions & 9 deletions src/lisflood/global_modules/add1.py
@@ -41,7 +41,7 @@
from .settings import (calendar_inconsistency_warning, get_calendar_type, calendar, MaskAttrs, CutMap, NetCDFMetadata,
LisSettings, MaskInfo)
from .errors import LisfloodWarning, LisfloodError
from .decorators import iocache
from .decorators import Cache


def defsoil(name1, name2=None, name3=None):
@@ -116,19 +116,21 @@ def mapattrNetCDF(name):
nf1.close()
maskattrs = MaskAttrs.instance()

cell_x = maskattrs['cell'] - np.abs(x2 - x1) # this must be same precision as pcraster.clone().cellsize()
cell_y = maskattrs['cell'] - np.abs(y2 - y1) # this must be same precision as pcraster.clone().cellsize()
cell_x = np.abs(x2 - x1)
cell_y = np.abs(y2 - y1)
check_x = maskattrs['cell'] - cell_x # this must be same precision as pcraster.clone().cellsize()
check_y = maskattrs['cell'] - cell_y # this must be same precision as pcraster.clone().cellsize()

if abs(cell_x) > 10**-5 or abs(cell_y) > 10**-5:
if abs(check_x) > 10**-5 or abs(check_y) > 10**-5:
raise LisfloodError("Cell size different in maskmap {} and {}".format(
LisSettings.instance().binding['MaskMap'], filename)
)
half_cell = maskattrs['cell'] / 2.
x = x1 - half_cell # |
y = y1 + half_cell # | coordinates of the upper left corner of the input file upper left pixel
cut0 = int(np.abs(maskattrs['x'] - x) / maskattrs['cell'])
cut0 = int(np.abs(maskattrs['x'] - x) / cell_x)
cut1 = cut0 + maskattrs['col']
cut2 = int(np.abs(maskattrs['y'] - y) / maskattrs['cell'])
cut2 = int(np.abs(maskattrs['y'] - y) / cell_y)
cut3 = cut2 + maskattrs['row']
return cut0, cut1, cut2, cut3 # input data will be sliced using [cut0:cut1,cut2:cut3]

@@ -269,18 +271,27 @@ def makenumpy(map):
def loadmap(*args, **kwargs):
settings = LisSettings.instance()
binding = settings.binding

if binding['MapsCaching'] == "True":
# get path to map file to make sure it's unique in the cache
if len(args) > 0:
name = args[0]
else:
name = kwargs['name']
value = binding[name]
kwargs['value'] = value
data = loadmap_cached(*args, **kwargs)
else:
data = loadmap_base(*args, **kwargs)

return data

@iocache
@Cache
def loadmap_cached(*args, **kwargs):
return loadmap_base(*args, **kwargs)


def loadmap_base(name, pcr=False, lddflag=False, timestampflag='exact', averageyearflag=False):
def loadmap_base(name, pcr=False, lddflag=False, timestampflag='exact', averageyearflag=False, value=None):
""" Load a static map either value or pcraster map or netcdf (single or stack)
Load a static map either value or pcraster map or netcdf (single or stack)
@@ -303,7 +314,8 @@ def loadmap_base(name, pcr=False, lddflag=False, timestampflag='exact', averagey
settings = LisSettings.instance()
binding = settings.binding
flags = settings.flags
value = binding[name]
if value is None:
value = binding[name]
# path and name of the map file
filename = value
load = False
103 changes: 63 additions & 40 deletions src/lisflood/global_modules/decorators.py
@@ -16,6 +16,7 @@
"""

from functools import wraps
import copy


def counted(fn):
@@ -29,6 +30,10 @@ def wrapper(*args, **kwargs):


def cached(f):
"""
Simple cache for small objects like parsing options
"""

_cache = {}

@wraps(f)
@@ -41,52 +46,70 @@ def _decorator(args):
return _decorator


def iocache(obj):
class Cache:
"""
Class decorator used to cache large objects read from disk
Mostly used for forcings and static maps
"""

cache = {}
found = {}

def __init__(self, fn):
self.name = fn.__name__
self.fn = fn
# we need to put the counter in a dict
# or we lose the reference
if self.name not in self.found:
self.found[self.name] = 0

found = 0
def __call__(self, *args, **kwargs):

@wraps(obj)
def iocache_wrapper(*args, **kwargs):
key = str(args) + str(kwargs)
key = '{}, {}, {}'.format(self.name, args, kwargs)

if key not in cache:
my_obj = obj(*args, **kwargs)
if not isinstance(my_obj, float):
cache[key] = my_obj
to_return = cache[key]
if key not in self.cache:
data = self.fn(*args, **kwargs)
# we don't cache small objects (e.g. floats from loadmap)
if isinstance(data, float):
return_data = data
else:
return my_obj
self.cache[key] = data
return_data = self.cache[key]
else:
nonlocal found
found += 1
to_return = cache[key]
return to_return

def iocache_clear():
return_data = self.cache[key]
self.found[self.name] += 1
return return_data

@classmethod
def clear(cls):
print('Clearing cache')
cache.clear()
nonlocal found
found = 0

def iocache_size():
return len(cache)

def iocache_found():
nonlocal found
return found

def iocache_info():
print('Caching {}'.format(obj))
print('Number of items cached: {}'.format(iocache_size()))
print('Number of items retrieved: {}'.format(iocache_found()))
cls.cache.clear()
for i in cls.found:
cls.found[i] = 0

@classmethod
def size(cls):
return len(cls.cache)

@classmethod
def extract(cls):
return copy.deepcopy(cls.cache)

@classmethod
def apply(cls, cache_in):
# We need to loop to keep the reference to cache
for i in cache_in:
cls.cache[i] = cache_in[i]

@classmethod
def values_found(cls):
return sum(cls.found.values())

@classmethod
def info(cls):
print('Caching')
print('Number of items cached: {}'.format(cls.size()))
print('Number of items retrieved: {}'.format(cls.found))
print('Keys:')
for key in cache.keys():
for key in cls.cache.keys():
print(' - {}'.format(key))

iocache_wrapper.iocache_clear = iocache_clear
iocache_wrapper.iocache_found = iocache_found
iocache_wrapper.iocache_info = iocache_info
iocache_wrapper.iocache_size = iocache_size

return iocache_wrapper
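
For reference, a minimal standalone sketch (not part of this commit) of how the new `Cache` class decorator above can be used; the decorated function and the file name below are purely illustrative:

```python
# Illustrative use of the Cache class decorator from
# src/lisflood/global_modules/decorators.py; load_array is a stand-in
# for an expensive read from disk.
from lisflood.global_modules.decorators import Cache

@Cache
def load_array(path):
    return [0.0] * 1_000_000   # pretend this was read from `path`

a = load_array('map_a.nc')     # first call: computed and stored in the cache
b = load_array('map_a.nc')     # second call: returned from the cache
assert a is b

print(Cache.size())            # -> 1 item cached
print(Cache.values_found())    # -> 1 cache hit
Cache.info()                   # prints the cached keys
Cache.clear()                  # empties the cache and resets the hit counters
```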