diff --git a/0.15.0/.buildinfo b/0.15.0/.buildinfo
deleted file mode 100644
index 290f35e0..00000000
--- a/0.15.0/.buildinfo
+++ /dev/null
@@ -1,4 +0,0 @@
-# Sphinx build info version 1
-# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: 99b5fb87df43dda210655cb79da09458
-tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/0.15.0/.doctrees/authors.doctree b/0.15.0/.doctrees/authors.doctree
deleted file mode 100644
index 8c234201..00000000
Binary files a/0.15.0/.doctrees/authors.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/contributing.doctree b/0.15.0/.doctrees/contributing.doctree
deleted file mode 100644
index eafc9194..00000000
Binary files a/0.15.0/.doctrees/contributing.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/environment.pickle b/0.15.0/.doctrees/environment.pickle
deleted file mode 100644
index 461915f6..00000000
Binary files a/0.15.0/.doctrees/environment.pickle and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/COG.doctree b/0.15.0/.doctrees/examples/COG.doctree
deleted file mode 100644
index dc50f1df..00000000
Binary files a/0.15.0/.doctrees/examples/COG.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/clip_box.doctree b/0.15.0/.doctrees/examples/clip_box.doctree
deleted file mode 100644
index 27bf308e..00000000
Binary files a/0.15.0/.doctrees/examples/clip_box.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/clip_geom.doctree b/0.15.0/.doctrees/examples/clip_geom.doctree
deleted file mode 100644
index a581b705..00000000
Binary files a/0.15.0/.doctrees/examples/clip_geom.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/convert_to_raster.doctree b/0.15.0/.doctrees/examples/convert_to_raster.doctree
deleted file mode 100644
index 31501f16..00000000
Binary files a/0.15.0/.doctrees/examples/convert_to_raster.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/dask_read_write.doctree b/0.15.0/.doctrees/examples/dask_read_write.doctree
deleted file mode 100644
index f3a218c4..00000000
Binary files a/0.15.0/.doctrees/examples/dask_read_write.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/examples.doctree b/0.15.0/.doctrees/examples/examples.doctree
deleted file mode 100644
index 12a5f420..00000000
Binary files a/0.15.0/.doctrees/examples/examples.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/interpolate_na.doctree b/0.15.0/.doctrees/examples/interpolate_na.doctree
deleted file mode 100644
index bdd9d98d..00000000
Binary files a/0.15.0/.doctrees/examples/interpolate_na.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/merge.doctree b/0.15.0/.doctrees/examples/merge.doctree
deleted file mode 100644
index 94e9aa04..00000000
Binary files a/0.15.0/.doctrees/examples/merge.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/pad_box.doctree b/0.15.0/.doctrees/examples/pad_box.doctree
deleted file mode 100644
index ed1fd45b..00000000
Binary files a/0.15.0/.doctrees/examples/pad_box.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/read-locks.doctree b/0.15.0/.doctrees/examples/read-locks.doctree
deleted file mode 100644
index 1ac2267c..00000000
Binary files a/0.15.0/.doctrees/examples/read-locks.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/reproject.doctree b/0.15.0/.doctrees/examples/reproject.doctree
deleted file mode 100644
index f7382313..00000000
Binary files a/0.15.0/.doctrees/examples/reproject.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/reproject_match.doctree b/0.15.0/.doctrees/examples/reproject_match.doctree
deleted file mode 100644
index 7f71a2b9..00000000
Binary files a/0.15.0/.doctrees/examples/reproject_match.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/resampling.doctree b/0.15.0/.doctrees/examples/resampling.doctree
deleted file mode 100644
index 6cb230f7..00000000
Binary files a/0.15.0/.doctrees/examples/resampling.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/examples/transform_bounds.doctree b/0.15.0/.doctrees/examples/transform_bounds.doctree
deleted file mode 100644
index e72cdca4..00000000
Binary files a/0.15.0/.doctrees/examples/transform_bounds.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/getting_started/crs_management.doctree b/0.15.0/.doctrees/getting_started/crs_management.doctree
deleted file mode 100644
index e37b4f96..00000000
Binary files a/0.15.0/.doctrees/getting_started/crs_management.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/getting_started/getting_started.doctree b/0.15.0/.doctrees/getting_started/getting_started.doctree
deleted file mode 100644
index a7912e44..00000000
Binary files a/0.15.0/.doctrees/getting_started/getting_started.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/getting_started/manage_information_loss.doctree b/0.15.0/.doctrees/getting_started/manage_information_loss.doctree
deleted file mode 100644
index 3ec9cde1..00000000
Binary files a/0.15.0/.doctrees/getting_started/manage_information_loss.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/getting_started/nodata_management.doctree b/0.15.0/.doctrees/getting_started/nodata_management.doctree
deleted file mode 100644
index 28f7bb03..00000000
Binary files a/0.15.0/.doctrees/getting_started/nodata_management.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/history.doctree b/0.15.0/.doctrees/history.doctree
deleted file mode 100644
index 94113632..00000000
Binary files a/0.15.0/.doctrees/history.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/index.doctree b/0.15.0/.doctrees/index.doctree
deleted file mode 100644
index a9cdc479..00000000
Binary files a/0.15.0/.doctrees/index.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/installation.doctree b/0.15.0/.doctrees/installation.doctree
deleted file mode 100644
index 7bf5e3fc..00000000
Binary files a/0.15.0/.doctrees/installation.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/modules.doctree b/0.15.0/.doctrees/modules.doctree
deleted file mode 100644
index 21734985..00000000
Binary files a/0.15.0/.doctrees/modules.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/readme.doctree b/0.15.0/.doctrees/readme.doctree
deleted file mode 100644
index d7f6c71b..00000000
Binary files a/0.15.0/.doctrees/readme.doctree and /dev/null differ
diff --git a/0.15.0/.doctrees/rioxarray.doctree b/0.15.0/.doctrees/rioxarray.doctree
deleted file mode 100644
index a70796ff..00000000
Binary files a/0.15.0/.doctrees/rioxarray.doctree and /dev/null differ
diff --git a/0.15.0/_modules/index.html b/0.15.0/_modules/index.html
deleted file mode 100644
index 063873f8..00000000
--- a/0.15.0/_modules/index.html
+++ /dev/null
@@ -1,119 +0,0 @@
-"""
-
-Credits:
-
-This file was adapted from: https://github.com/pydata/xarray # noqa
-Source file: https://github.com/pydata/xarray/blob/1d7bcbdc75b6d556c04e2c7d7a042e4379e15303/xarray/backends/rasterio_.py # noqa
-"""
-# pylint: disable=too-many-lines
-import contextlib
-import functools
-import importlib.metadata
-import os
-import re
-import threading
-import warnings
-from collections import defaultdict
-from collections.abc import Hashable, Iterable
-from typing import Any, Optional, Union
-
-import numpy
-import rasterio
-from numpy.typing import NDArray
-from packaging import version
-from rasterio.errors import NotGeoreferencedWarning
-from rasterio.vrt import WarpedVRT
-from xarray import Dataset, IndexVariable
-from xarray.backends.common import BackendArray
-from xarray.backends.file_manager import CachingFileManager, FileManager
-from xarray.backends.locks import SerializableLock
-from xarray.coding import times, variables
-from xarray.core import indexing
-from xarray.core.dataarray import DataArray
-from xarray.core.dtypes import maybe_promote
-from xarray.core.utils import is_scalar
-from xarray.core.variable import as_variable
-
-from rioxarray.exceptions import RioXarrayError
-from rioxarray.rioxarray import _generate_spatial_coords
-
-FILL_VALUE_NAMES = ("_FillValue", "missing_value", "fill_value", "nodata")
-UNWANTED_RIO_ATTRS = ("nodatavals", "is_tiled", "res")
-# TODO: should this be GDAL_LOCK instead?
-RASTERIO_LOCK = SerializableLock()
-NO_LOCK = contextlib.nullcontext()
-
-
-def _ensure_warped_vrt(riods, vrt_params):
- """
- Ensure the dataset is represented as a warped vrt
- """
- if vrt_params is None:
- return riods
- if isinstance(riods, SingleBandDatasetReader):
- riods._create_vrt(vrt_params)
- else:
- riods = WarpedVRT(riods, **vrt_params)
- return riods
-
-
-class SingleBandDatasetReader:
- """
- Hack to have a DatasetReader behave like it only has one band
- """
-
- def __init__(self, riods, bidx, vrt_params=None) -> None:
- self._riods = riods
- self._bidx = bidx
- self._vrt_params = vrt_params
- self._create_vrt(vrt_params=vrt_params)
-
- def __getattr__(self, __name: str) -> Any:
- return getattr(self._riods, __name)
-
- def _create_vrt(self, vrt_params):
- if vrt_params is not None and not isinstance(self._riods, WarpedVRT):
- self._riods = WarpedVRT(self._riods, **vrt_params)
- self._vrt_params = vrt_params
-
- @property
- def name(self):
- """
- str: name of the dataset. Usually the path.
- """
- if isinstance(self._riods, rasterio.vrt.WarpedVRT):
- return self._riods.src_dataset.name
- return self._riods.name
-
- @property
- def count(self):
- """
- int: band count
- """
- return 1
-
- @property
- def nodata(self):
- """
- Nodata value for the band
- """
- return self._riods.nodatavals[self._bidx]
-
- @property
- def offsets(self):
- """
- Offset value for the band
- """
- return [self._riods.offsets[self._bidx]]
-
- @property
- def scales(self):
- """
- Scale value for the band
- """
- return [self._riods.scales[self._bidx]]
-
- @property
- def units(self):
- """
- Unit for the band
- """
- return [self._riods.units[self._bidx]]
-
- @property
- def descriptions(self):
- """
- Description for the band
- """
- return [self._riods.descriptions[self._bidx]]
-
- @property
- def dtypes(self):
- """
- dtype for the band
- """
- return [self._riods.dtypes[self._bidx]]
-
- @property
- def indexes(self):
- """
- indexes for the band
- """
- return [self._riods.indexes[self._bidx]]
-
- def read(self, indexes=None, **kwargs): # pylint: disable=unused-argument
- """
- read data for the band
- """
- return self._riods.read(indexes=self._bidx + 1, **kwargs)
-
- def tags(self, bidx=None, **kwargs): # pylint: disable=unused-argument
- """
- read tags for the band
- """
- return self._riods.tags(bidx=self._bidx + 1, **kwargs)
-
-
-RasterioReader = Union[
- rasterio.io.DatasetReader, rasterio.vrt.WarpedVRT, SingleBandDatasetReader
-]
-
-
-try:
- _DASK_GTE_018 = version.parse(importlib.metadata.version("dask")) >= version.parse(
- "0.18.0"
- )
-except importlib.metadata.PackageNotFoundError:
- _DASK_GTE_018 = False
-
-
-def _get_unsigned_dtype(unsigned, dtype):
- """
- Based on: https://github.com/pydata/xarray/blob/abe1e613a96b000ae603c53d135828df532b952e/xarray/coding/variables.py#L306-L334
- """
- dtype = numpy.dtype(dtype)
- if unsigned is True and dtype.kind == "i":
- return numpy.dtype(f"u{dtype.itemsize}")
- if unsigned is False and dtype.kind == "u":
- return numpy.dtype(f"i{dtype.itemsize}")
- return None
-
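# A quick sketch (not part of the module) of the helper above: it maps a signed
# integer dtype to its unsigned counterpart (and vice versa) when the netCDF
# "_Unsigned" hint applies, and returns None for non-integer dtypes.
assert _get_unsigned_dtype(unsigned=True, dtype="int16") == numpy.dtype("uint16")
assert _get_unsigned_dtype(unsigned=False, dtype="uint8") == numpy.dtype("int8")
assert _get_unsigned_dtype(unsigned=True, dtype="float32") is None  # not an integer dtype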
-
-class FileHandleLocal(threading.local):
- """
- This contains the thread local ThreadURIManager
- """
-
- def __init__(self): # pylint: disable=super-init-not-called
- self.thread_manager = None # Initialises in each thread
-
-
-class ThreadURIManager:
- """
- This handles opening & closing file handles in each thread.
- """
-
- def __init__(
- self,
- opener,
- *args,
- mode="r",
- kwargs=None,
- ):
- self._opener = opener
- self._args = args
- self._mode = mode
- self._kwargs = {} if kwargs is None else dict(kwargs)
- self._file_handle = None
-
- @property
- def file_handle(self):
- """
- File handle returned by the opener.
- """
- if self._file_handle is not None:
- return self._file_handle
- self._file_handle = self._opener(*self._args, mode=self._mode, **self._kwargs)
- return self._file_handle
-
- def close(self):
- """
- Close file handle.
- """
- if self._file_handle is not None:
- self._file_handle.close()
- self._file_handle = None
-
- def __del__(self):
- self.close()
-
- def __enter__(self):
- return self
-
- def __exit__(self, type_, value, traceback):
- self.close()
-
-
-class URIManager(FileManager):
- """
- The URI manager is used for lockless reading
- """
-
- def __init__(
- self,
- opener,
- *args,
- mode="r",
- kwargs=None,
- ):
- self._opener = opener
- self._args = args
- self._mode = mode
- self._kwargs = {} if kwargs is None else dict(kwargs)
- self._local = FileHandleLocal()
-
- def acquire(self, needs_lock=True):
- if self._local.thread_manager is None:
- self._local.thread_manager = ThreadURIManager(
- self._opener, *self._args, mode=self._mode, kwargs=self._kwargs
- )
- return self._local.thread_manager.file_handle
-
- @contextlib.contextmanager
- def acquire_context(self, needs_lock=True):
- try:
- yield self.acquire(needs_lock=needs_lock)
- except Exception:
- self.close(needs_lock=needs_lock)
- raise
-
- def close(self, needs_lock=True):
- if self._local.thread_manager is not None:
- self._local.thread_manager.close()
- self._local.thread_manager = None
-
- def __del__(self):
- self.close(needs_lock=False)
-
- def __getstate__(self):
- """State for pickling."""
- return (self._opener, self._args, self._mode, self._kwargs)
-
- def __setstate__(self, state):
- """Restore from a pickle."""
- opener, args, mode, kwargs = state
- self.__init__(opener, *args, mode=mode, kwargs=kwargs)
-
-
-class RasterioArrayWrapper(BackendArray):
- """A wrapper around rasterio dataset objects"""
-
- # pylint: disable=too-many-instance-attributes
-
- def __init__(
- self,
- manager,
- lock,
- name,
- vrt_params=None,
- masked=False,
- mask_and_scale=False,
- unsigned=False,
- ):
- self.manager = manager
- self.lock = lock
- self.masked = masked or mask_and_scale
- self.mask_and_scale = mask_and_scale
-
- # cannot save riods as an attribute: this would break pickleability
- riods = _ensure_warped_vrt(manager.acquire(), vrt_params)
- self.vrt_params = vrt_params
- self._shape = (riods.count, riods.height, riods.width)
-
- self._dtype = None
- self._unsigned_dtype = None
- self._fill_value = riods.nodata
- dtypes = riods.dtypes
- if not numpy.all(numpy.asarray(dtypes) == dtypes[0]):
- raise ValueError("All bands should have the same dtype")
-
- dtype = _rasterio_to_numpy_dtype(dtypes)
- if mask_and_scale and unsigned is not None:
- self._unsigned_dtype = _get_unsigned_dtype(
- unsigned=unsigned,
- dtype=dtype,
- )
- if self._unsigned_dtype is not None and self._fill_value is not None:
- self._fill_value = self._unsigned_dtype.type(self._fill_value)
- if self._unsigned_dtype is None and dtype.kind not in ("i", "u"):
- warnings.warn(
- f"variable {name!r} has _Unsigned attribute but is not "
- "of integer type. Ignoring attribute.",
- variables.SerializationWarning,
- stacklevel=3,
- )
- if self.masked:
- self._dtype, self._fill_value = maybe_promote(dtype)
- else:
- self._dtype = dtype
-
- @property
- def dtype(self):
- """
- Data type of the array
- """
- return self._dtype
-
- @property
- def fill_value(self):
- """
- Fill value of the array
- """
- return self._fill_value
-
- @property
- def shape(self):
- """
- Shape of the array
- """
- return self._shape
-
- def _get_indexer(self, key):
- """Get indexer for rasterio array.
-
- Parameters
- ----------
- key: tuple of int
-
- Returns
- -------
- band_key: an indexer for the 1st dimension
- window: two tuples. Each consists of (start, stop).
- squeeze_axis: axes to be squeezed
- np_ind: indexer for loaded numpy array
-
- See also
- --------
- indexing.decompose_indexer
- """
- if len(key) != 3:
- raise RioXarrayError("rasterio datasets should always be 3D")
-
- # bands cannot be windowed but they can be listed
- band_key = key[0]
- np_inds = []
- # bands (axis=0) cannot be windowed but they can be listed
- if isinstance(band_key, slice):
- start, stop, step = band_key.indices(self.shape[0])
- band_key = numpy.arange(start, stop, step)
- # be sure we give out a list
- band_key = (numpy.asarray(band_key) + 1).tolist()
- if isinstance(band_key, list): # if band_key is not a scalar
- np_inds.append(slice(None))
-
- # but other dims can only be windowed
- window = []
- squeeze_axis = []
- for iii, (ikey, size) in enumerate(zip(key[1:], self.shape[1:])):
- if isinstance(ikey, slice):
- # step is always positive. see indexing.decompose_indexer
- start, stop, step = ikey.indices(size)
- np_inds.append(slice(None, None, step))
- elif is_scalar(ikey):
- # windowed operations will always return an array
- # we will have to squeeze it later
- squeeze_axis.append(-(2 - iii))
- start = ikey
- stop = ikey + 1
- else:
- start, stop = numpy.min(ikey), numpy.max(ikey) + 1
- np_inds.append(ikey - start)
- window.append((start, stop))
-
- if isinstance(key[1], numpy.ndarray) and isinstance(key[2], numpy.ndarray):
- # do outer-style indexing
- np_inds[-2:] = numpy.ix_(*np_inds[-2:])
-
- return band_key, tuple(window), tuple(squeeze_axis), tuple(np_inds)
-
- def _getitem(self, key):
- band_key, window, squeeze_axis, np_inds = self._get_indexer(key)
- if not band_key or any(start == stop for (start, stop) in window):
- # no need to do IO
- shape = (len(band_key),) + tuple(stop - start for (start, stop) in window)
- out = numpy.zeros(shape, dtype=self.dtype)
- else:
- with self.lock:
- riods = _ensure_warped_vrt(
- self.manager.acquire(needs_lock=False), self.vrt_params
- )
- out = riods.read(band_key, window=window, masked=self.masked)
- if self._unsigned_dtype is not None:
- out = out.astype(self._unsigned_dtype)
- if self.masked:
- out = numpy.ma.filled(out.astype(self.dtype), self.fill_value)
- if self.mask_and_scale:
- if not isinstance(band_key, Iterable):
- out = (
- out * riods.scales[band_key - 1]
- + riods.offsets[band_key - 1]
- )
- else:
- for iii, band_iii in enumerate(numpy.atleast_1d(band_key) - 1):
- out[iii] = (
- out[iii] * riods.scales[band_iii]
- + riods.offsets[band_iii]
- )
-
- if squeeze_axis:
- out = numpy.squeeze(out, axis=squeeze_axis)
- return out[np_inds]
-
- def __getitem__(self, key):
- return indexing.explicit_indexing_adapter(
- key, self.shape, indexing.IndexingSupport.OUTER, self._getitem
- )
-
-
-def _parse_envi(meta):
- """Parse ENVI metadata into Python data structures.
-
- See the link for information on the ENVI header file format:
- http://www.harrisgeospatial.com/docs/enviheaderfiles.html
-
- Parameters
- ----------
- meta : dict
- Dictionary of keys and str values to parse, as returned by the rasterio
- tags(ns='ENVI') call.
-
- Returns
- -------
- parsed_meta : dict
- Dictionary containing the original keys and the parsed values
-
- """
-
- def parsevec(value):
- return numpy.fromstring(value.strip("{}"), dtype="float", sep=",")
-
- def default(value):
- return value.strip("{}")
-
- parse = {"wavelength": parsevec, "fwhm": parsevec}
- parsed_meta = {key: parse.get(key, default)(value) for key, value in meta.items()}
- return parsed_meta
-
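# Illustrative call to _parse_envi above (not part of the module); the header
# values are hypothetical but follow the ENVI "{...}" convention described in
# the docstring: vector keys become numpy arrays, everything else stays a string.
envi_meta = {"wavelength": "{450.0, 550.0, 650.0}", "map_info": "{UTM, 1.0, 1.0}"}
parsed_envi = _parse_envi(envi_meta)
# parsed_envi["wavelength"] -> array([450., 550., 650.]); parsed_envi["map_info"] -> "UTM, 1.0, 1.0"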
-
-def _rasterio_to_numpy_dtype(dtypes):
- """Numpy dtype from first entry of rasterio dataset.dtypes"""
- # rasterio has some special dtype names (complex_int16 -> numpy.complex64)
- if dtypes[0] == "complex_int16":
- dtype = numpy.dtype("complex64")
- else:
- dtype = numpy.dtype(dtypes[0])
-
- return dtype
-
-
-def _to_numeric(value: Any) -> float:
- """
- Convert the value to a number
- """
- try:
- value = int(value)
- except (TypeError, ValueError):
- try:
- value = float(value)
- except (TypeError, ValueError):
- pass
- return value
-
-
-def _parse_tag(key: str, value: Any) -> tuple[str, Any]:
- # NC_GLOBAL is prepended to tags with the netcdf driver and is not really needed
- key = key.split("NC_GLOBAL#")[-1]
- if value.startswith("{") and value.endswith("}"):
- try:
- new_val = numpy.fromstring(value.strip("{}"), dtype="float", sep=",")
- # pylint: disable=len-as-condition
- value = new_val if len(new_val) else _to_numeric(value)
- except ValueError:
- value = _to_numeric(value)
- else:
- value = _to_numeric(value)
- return key, value
-
-
-def _parse_tags(tags: dict) -> dict:
- parsed_tags = {}
- for key, value in tags.items():
- key, value = _parse_tag(key, value)
- parsed_tags[key] = value
- return parsed_tags
-
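# A small sketch (not part of the module) of how GDAL tag values are coerced by
# _parse_tag/_parse_tags above: the netCDF "NC_GLOBAL#" prefix is dropped,
# numeric strings become numbers, and "{...}" lists become numpy arrays.
sample_tags = {"NC_GLOBAL#title": "example", "scale": "2", "levels": "{1,2,3}"}
parsed_sample = _parse_tags(sample_tags)
# -> {"title": "example", "scale": 2, "levels": array([1., 2., 3.])}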
-
-NETCDF_DTYPE_MAP = {
- 0: object, # NC_NAT
- 1: numpy.byte, # NC_BYTE
- 2: numpy.char, # NC_CHAR
- 3: numpy.short, # NC_SHORT
- 4: numpy.int_, # NC_INT, NC_LONG
- 5: float, # NC_FLOAT
- 6: numpy.double, # NC_DOUBLE
- 7: numpy.ubyte, # NC_UBYTE
- 8: numpy.ushort, # NC_USHORT
- 9: numpy.uint, # NC_UINT
- 10: numpy.int64, # NC_INT64
- 11: numpy.uint64, # NC_UINT64
- 12: object, # NC_STRING
-}
-
-
-def _load_netcdf_attrs(tags: dict, data_array: DataArray) -> None:
- """
- Loads the netCDF attributes into the data array
-
- Attributes stored in this format:
- - variable_name#attr_name: attr_value
- """
- for key, value in tags.items():
- key, value = _parse_tag(key, value)
- key_split = key.split("#")
- if len(key_split) != 2:
- continue
- variable_name, attr_name = key_split
- if variable_name in data_array.coords:
- data_array.coords[variable_name].attrs.update({attr_name: value})
-
-
-def _parse_netcdf_attr_array(attr: Union[NDArray, str], dtype=None) -> NDArray:
- """
- Expected format: '{2,6}' or '[2. 6.]'
- """
- value: Union[NDArray, str, list]
- if isinstance(attr, str):
- if attr.startswith("{"):
- value = attr.strip("{}").split(",")
- else:
- value = attr.strip("[]").split()
- elif not isinstance(attr, Iterable):
- value = [attr]
- else:
- value = attr
- return numpy.array(value, dtype=dtype)
-
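# Illustrative parse (not part of the module) of the two serialized forms noted
# in the docstring above; both yield the same numpy array when a dtype is given.
_parse_netcdf_attr_array("{0,872712.659688}", dtype="float64")    # -> array([0., 872712.659688])
_parse_netcdf_attr_array("[ 0. 872712.659688]", dtype="float64")  # -> array([0., 872712.659688])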
-
-def _load_netcdf_1d_coords(tags: dict) -> dict:
- """
- Dimension information:
- - NETCDF_DIM_EXTRA: '{time}' (comma separated list of dim names)
- - NETCDF_DIM_time_DEF: '{2,6}' or '[2. 6.]' (dim size, dim dtype)
- - NETCDF_DIM_time_VALUES: '{0,872712.659688}' (comma separated list of data) or [ 0. 872712.659688]
- """
- dim_names = tags.get("NETCDF_DIM_EXTRA")
- if not dim_names:
- return {}
- dim_names = _parse_netcdf_attr_array(dim_names)
- coords = {}
- for dim_name in dim_names:
- dim_def = tags.get(f"NETCDF_DIM_{dim_name}_DEF")
- if dim_def is None:
- continue
- # pylint: disable=unused-variable
- dim_size, dim_dtype = _parse_netcdf_attr_array(dim_def)
- dim_dtype = NETCDF_DTYPE_MAP.get(int(float(dim_dtype)), object)
- dim_values = _parse_netcdf_attr_array(tags[f"NETCDF_DIM_{dim_name}_VALUES"])
- coords[dim_name] = IndexVariable(dim_name, dim_values)
- return coords
-
-
-def build_subdataset_filter(
- group_names: Optional[Union[str, list[str], tuple[str, ...]]],
- variable_names: Optional[Union[str, list[str], tuple[str, ...]]],
-):
- """
- Example::
- 'HDF4_EOS:EOS_GRID:"./modis/MOD09GQ.A2017290.h11v04.006.NRT.hdf":
- MODIS_Grid_2D:sur_refl_b01_1'
-
- Parameters
- ----------
- group_names: str or list or tuple
- Name or names of netCDF groups to filter by.
-
- variable_names: str or list or tuple
- Name or names of netCDF variables to filter by.
-
- Returns
- -------
- re.Pattern: output of re.compile()
- """
- variable_query = r"\w+"
- if variable_names is not None:
- if not isinstance(variable_names, (tuple, list)):
- variable_names = [variable_names]
- variable_names = [re.escape(variable_name) for variable_name in variable_names]
- variable_query = rf"(?:{'|'.join(variable_names)})"
- if group_names is not None:
- if not isinstance(group_names, (tuple, list)):
- group_names = [group_names]
- group_names = [re.escape(group_name) for group_name in group_names]
- group_query = rf"(?:{'|'.join(group_names)})"
- else:
- return re.compile(r"".join([r".*(?:\:/|\:)(/+)?", variable_query, r"$"]))
- return re.compile(
- r"".join(
- [r".*(?:\:/|\:)(/+)?", group_query, r"[:/](/+)?", variable_query, r"$"]
- )
- )
-
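# Usage sketch (not part of the module): the compiled pattern is matched against
# GDAL subdataset strings such as the HDF4 example from the docstring above.
sds = 'HDF4_EOS:EOS_GRID:"./modis/MOD09GQ.A2017290.h11v04.006.NRT.hdf":MODIS_Grid_2D:sur_refl_b01_1'
assert build_subdataset_filter("MODIS_Grid_2D", "sur_refl_b01_1").match(sds)
assert build_subdataset_filter(None, "another_variable").match(sds) is None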
-
-def _get_rasterio_attrs(riods: RasterioReader):
- """
- Get rasterio specific attributes
- """
- # pylint: disable=too-many-branches
- # Add rasterio attributes
- attrs = _parse_tags({**riods.tags(), **riods.tags(1)})
- # remove attributes with information
- # that should be added by GDAL/rasterio
- for unwanted_attr in FILL_VALUE_NAMES + UNWANTED_RIO_ATTRS:
- attrs.pop(unwanted_attr, None)
- if riods.nodata is not None:
- # The nodata values for the raster bands
- attrs["_FillValue"] = riods.nodata
- # The scale values for the raster bands
- if len(set(riods.scales)) > 1:
- attrs["scales"] = riods.scales
- warnings.warn(
- "Offsets differ across bands. The 'scale_factor' attribute will "
- "not be added. See the 'scales' attribute."
- )
- else:
- attrs["scale_factor"] = riods.scales[0]
- # The offset values for the raster bands
- if len(set(riods.offsets)) > 1:
- attrs["offsets"] = riods.offsets
- warnings.warn(
- "Offsets differ across bands. The 'add_offset' attribute will "
- "not be added. See the 'offsets' attribute."
- )
- else:
- attrs["add_offset"] = riods.offsets[0]
- if any(riods.descriptions):
- if len(set(riods.descriptions)) == 1:
- attrs["long_name"] = riods.descriptions[0]
- else:
- # Descriptions for each dataset band
- attrs["long_name"] = riods.descriptions
- if any(riods.units):
- # A list of units string for each dataset band
- if len(riods.units) == 1:
- attrs["units"] = riods.units[0]
- else:
- attrs["units"] = riods.units
-
- return attrs
-
-
-def _decode_datetime_cf(
- data_array: DataArray,
- decode_times: bool,
- decode_timedelta: Optional[bool],
-) -> DataArray:
- """
- Decode the datetime based on CF conventions
- """
- if decode_timedelta is None:
- decode_timedelta = decode_times
-
- for coord in data_array.coords:
- time_var = None
- if decode_times and "since" in data_array[coord].attrs.get("units", ""):
- time_var = times.CFDatetimeCoder(use_cftime=True).decode(
- as_variable(data_array[coord]), name=coord
- )
- elif (
- decode_timedelta
- and data_array[coord].attrs.get("units") in times.TIME_UNITS
- ):
- time_var = times.CFTimedeltaCoder().decode(
- as_variable(data_array[coord]), name=coord
- )
- if time_var is not None:
- dimensions, data, attributes, encoding = variables.unpack_for_decoding(
- time_var
- )
- data_array = data_array.assign_coords(
- {
- coord: IndexVariable(
- dims=dimensions,
- data=data,
- attrs=attributes,
- encoding=encoding,
- )
- }
- )
- return data_array
-
-
-def _parse_driver_tags(
- riods: RasterioReader,
- attrs: dict,
- coords: dict,
-) -> None:
- # Parse extra metadata from tags, if supported
- parsers = {"ENVI": _parse_envi}
-
- driver = riods.driver
- if driver in parsers:
- meta = parsers[driver](riods.tags(ns=driver))
-
- for key, value in meta.items():
- # Add values as coordinates if they match the band count,
- # as attributes otherwise
- if isinstance(value, (list, numpy.ndarray)) and len(value) == riods.count:
- coords[key] = ("band", numpy.asarray(value))
- else:
- attrs[key] = value
-
-
-def _pop_global_netcdf_attrs_from_vars(dataset_to_clean: Dataset) -> Dataset:
- # remove GLOBAL netCDF attributes from dataset variables
- for coord in dataset_to_clean.coords:
- for variable in dataset_to_clean.variables:
- dataset_to_clean[variable].attrs = {
- attr: value
- for attr, value in dataset_to_clean[variable].attrs.items()
- if attr not in dataset_to_clean.attrs
- and not attr.startswith(f"{coord}#")
- }
- return dataset_to_clean
-
-
-def _subdataset_groups_to_dataset(
- dim_groups: dict[Hashable, dict[Hashable, DataArray]], global_tags: dict
-) -> Union[Dataset, list[Dataset]]:
- if dim_groups:
- dataset: Union[Dataset, list[Dataset]] = []
- for dim_group in dim_groups.values():
- dataset_group = _pop_global_netcdf_attrs_from_vars(
- Dataset(dim_group, attrs=global_tags)
- )
-
- def _ds_close():
- # pylint: disable=cell-var-from-loop
- for data_var in dim_group.values():
- data_var.close()
-
- dataset_group.set_close(_ds_close)
- dataset.append(dataset_group)
- if len(dataset) == 1:
- dataset = dataset.pop()
- else:
- dataset = Dataset(attrs=global_tags)
- return dataset
-
-
-def _load_subdatasets(
- riods: RasterioReader,
- group: Optional[Union[str, list[str], tuple[str, ...]]],
- variable: Optional[Union[str, list[str], tuple[str, ...]]],
- parse_coordinates: bool,
- chunks: Optional[Union[int, tuple, dict]],
- cache: Optional[bool],
- lock: Any,
- masked: bool,
- mask_and_scale: bool,
- decode_times: bool,
- decode_timedelta: Optional[bool],
- **open_kwargs,
-) -> Union[Dataset, list[Dataset]]:
- """
- Load in rasterio subdatasets
- """
- dim_groups: dict[Hashable, dict[Hashable, DataArray]] = defaultdict(dict)
- subdataset_filter = None
- if any((group, variable)):
- subdataset_filter = build_subdataset_filter(group, variable)
- for subdataset in riods.subdatasets:
- if subdataset_filter is not None and not subdataset_filter.match(subdataset):
- continue
- with rasterio.open(subdataset) as rds:
- shape = rds.shape
- rioda: DataArray = open_rasterio( # type: ignore
- subdataset,
- parse_coordinates=shape not in dim_groups and parse_coordinates,
- chunks=chunks,
- cache=cache,
- lock=lock,
- masked=masked,
- mask_and_scale=mask_and_scale,
- default_name=subdataset.split(":")[-1].lstrip("/").replace("/", "_"),
- decode_times=decode_times,
- decode_timedelta=decode_timedelta,
- **open_kwargs,
- )
- dim_groups[shape][rioda.name] = rioda
- return _subdataset_groups_to_dataset(
- dim_groups=dim_groups, global_tags=_parse_tags(riods.tags())
- )
-
-
-def _load_bands_as_variables(
- riods: RasterioReader,
- parse_coordinates: bool,
- chunks: Optional[Union[int, tuple, dict]],
- cache: Optional[bool],
- lock: Any,
- masked: bool,
- mask_and_scale: bool,
- decode_times: bool,
- decode_timedelta: Optional[bool],
- vrt_params: Optional[dict],
- **open_kwargs,
-) -> Union[Dataset, list[Dataset]]:
- """
- Load in rasterio bands as variables
- """
- global_tags = _parse_tags(riods.tags())
- data_vars = {}
- for band in riods.indexes:
- band_riods = SingleBandDatasetReader(
- riods=riods,
- bidx=band - 1,
- vrt_params=vrt_params,
- )
- band_name = f"band_{band}"
- data_vars[band_name] = (
- open_rasterio( # type: ignore
- band_riods,
- parse_coordinates=band == 1 and parse_coordinates,
- chunks=chunks,
- cache=cache,
- lock=lock,
- masked=masked,
- mask_and_scale=mask_and_scale,
- default_name=band_name,
- decode_times=decode_times,
- decode_timedelta=decode_timedelta,
- **open_kwargs,
- )
- .squeeze() # type: ignore
- .drop("band") # type: ignore
- )
- dataset = Dataset(data_vars, attrs=global_tags)
-
- def _ds_close():
- for data_var in data_vars.values():
- data_var.close()
-
- dataset.set_close(_ds_close)
- return dataset
-
-
-def _prepare_dask(
- result: DataArray,
- riods: RasterioReader,
- filename: Union[str, os.PathLike],
- chunks: Union[int, tuple, dict],
-) -> DataArray:
- """
- Prepare the data for dask computations
- """
- # pylint: disable=import-outside-toplevel
- from dask.base import tokenize
-
- # augment the token with the file modification time
- try:
- mtime = os.path.getmtime(filename)
- except (TypeError, OSError):
- # the filename is probably an s3 bucket rather than a regular file
- mtime = None
-
- if chunks in (True, "auto"):
- from dask.array.core import normalize_chunks
-
- if not _DASK_GTE_018:
- raise NotImplementedError("Automatic chunking requires dask >= 0.18.0")
- block_shape = (1,) + riods.block_shapes[0]
- chunks = normalize_chunks(
- chunks=(1, "auto", "auto"),
- shape=(riods.count, riods.height, riods.width),
- dtype=_rasterio_to_numpy_dtype(riods.dtypes),
- previous_chunks=tuple((c,) for c in block_shape),
- )
- token = tokenize(filename, mtime, chunks)
- name_prefix = f"open_rasterio-{token}"
- return result.chunk(chunks, name_prefix=name_prefix, token=token)
-
-
-def _handle_encoding(
- result: DataArray,
- mask_and_scale: bool,
- masked: bool,
- da_name: Optional[Hashable],
- unsigned: Union[bool, None],
-) -> None:
- """
- Make sure encoding is handled properly
- """
- if "grid_mapping" in result.attrs:
- variables.pop_to(result.attrs, result.encoding, "grid_mapping", name=da_name)
- if mask_and_scale:
- if "scale_factor" in result.attrs:
- variables.pop_to(
- result.attrs, result.encoding, "scale_factor", name=da_name
- )
- if "add_offset" in result.attrs:
- variables.pop_to(result.attrs, result.encoding, "add_offset", name=da_name)
- if masked:
- if "_FillValue" in result.attrs:
- variables.pop_to(result.attrs, result.encoding, "_FillValue", name=da_name)
- if "missing_value" in result.attrs:
- variables.pop_to(
- result.attrs, result.encoding, "missing_value", name=da_name
- )
-
- if mask_and_scale and unsigned is not None and "_FillValue" in result.encoding:
- unsigned_dtype = _get_unsigned_dtype(
- unsigned=unsigned,
- dtype=result.encoding["dtype"],
- )
- if unsigned_dtype is not None:
- result.encoding["_FillValue"] = unsigned_dtype.type(
- result.encoding["_FillValue"]
- )
-
-
-def _single_band_open(*args, bidx=0, **kwargs):
- """
- Open file as if it only has a single band
- """
- return SingleBandDatasetReader(
- riods=rasterio.open(*args, **kwargs),
- bidx=bidx,
- )
-
-
-def open_rasterio(
- filename: Union[
- str,
- os.PathLike,
- rasterio.io.DatasetReader,
- rasterio.vrt.WarpedVRT,
- SingleBandDatasetReader,
- ],
- parse_coordinates: Optional[bool] = None,
- chunks: Optional[Union[int, tuple, dict]] = None,
- cache: Optional[bool] = None,
- lock: Optional[Any] = None,
- masked: bool = False,
- mask_and_scale: bool = False,
- variable: Optional[Union[str, list[str], tuple[str, ...]]] = None,
- group: Optional[Union[str, list[str], tuple[str, ...]]] = None,
- default_name: Optional[str] = None,
- decode_times: bool = True,
- decode_timedelta: Optional[bool] = None,
- band_as_variable: bool = False,
- **open_kwargs,
-) -> Union[Dataset, DataArray, list[Dataset]]:
- # pylint: disable=too-many-statements,too-many-locals,too-many-branches
- """Open a file with rasterio (experimental).
-
- This should work with any file that rasterio can open (most often:
- geoTIFF). The x and y coordinates are generated automatically from the
- file's geoinformation, shifted to the center of each pixel (see
- `"PixelIsArea" Raster Space
- <http://web.archive.org/web/20160326194152/http://remotesensing.org/geotiff/spec/geotiff2.5.html#2.5.2>`_
- for more information).
-
- .. versionadded:: 0.13 band_as_variable
-
- Parameters
- ----------
- filename: str, rasterio.io.DatasetReader, or rasterio.vrt.WarpedVRT
- Path to the file to open. Or already open rasterio dataset.
- parse_coordinates: bool, optional
- Whether to parse the x and y coordinates out of the file's
- ``transform`` attribute or not. The default is to automatically
- parse the coordinates only if they are rectilinear (1D).
- It can be useful to set ``parse_coordinates=False``
- if your files are very large or if you don't need the coordinates.
- chunks: int, tuple or dict, optional
- Chunk sizes along each dimension, e.g., ``5``, ``(5, 5)`` or
- ``{'x': 5, 'y': 5}``. If chunks is provided, it is used to load the new
- DataArray into a dask array. Chunks can also be set to
- ``True`` or ``"auto"`` to choose sensible chunk sizes according to
- ``dask.config.get("array.chunk-size")``.
- cache: bool, optional
- If True, cache data loaded from the underlying datastore in memory as
- NumPy arrays when accessed to avoid reading from the underlying data-
- store multiple times. Defaults to True unless you specify the `chunks`
- argument to use dask, in which case it defaults to False.
- lock: bool or dask.utils.SerializableLock, optional
-
- If chunks is provided, this argument is used to ensure that only one
- thread per process is reading from a rasterio file object at a time.
-
- By default and when a lock instance is provided,
- a :class:`xarray.backends.CachingFileManager` is used to cache File objects.
- Since rasterio also caches some data, this will make repeated reads from the
- same object fast.
-
- When ``lock=False``, no lock is used, allowing for completely parallel reads
- from multiple threads or processes. However, a new file handle is opened on
- each request.
-
- masked: bool, optional
- If True, read the mask and set values to NaN. Defaults to False.
- mask_and_scale: bool, default=False
- Lazily scale (using the `scales` and `offsets` from rasterio) and mask.
- If the _Unsigned attribute is present treat integer arrays as unsigned.
- variable: str or list or tuple, optional
- Variable name or names to use to filter loading.
- group: str or list or tuple, optional
- Group name or names to use to filter loading.
- default_name: str, optional
- The name of the data array if none exists. Default is None.
- decode_times: bool, default=True
- If True, decode times encoded in the standard NetCDF datetime format
- into datetime objects. Otherwise, leave them encoded as numbers.
- decode_timedelta: bool, optional
- If True, decode variables and coordinates with time units in
- {“days”, “hours”, “minutes”, “seconds”, “milliseconds”, “microseconds”}
- into timedelta objects. If False, leave them encoded as numbers.
- If None (default), assume the same value as ``decode_times``.
- band_as_variable: bool, default=False
- If True, will load bands in a raster to separate variables.
- **open_kwargs: kwargs, optional
- Optional keyword arguments to pass into :func:`rasterio.open`.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray` | list[:obj:`xarray.Dataset`]:
- The newly created dataset(s).
- """
- parse_coordinates = True if parse_coordinates is None else parse_coordinates
- masked = masked or mask_and_scale
- vrt_params = None
- file_opener = rasterio.open
- if isinstance(filename, SingleBandDatasetReader):
- file_opener = functools.partial(
- _single_band_open,
- bidx=filename._bidx,
- )
- vrt_params = filename._vrt_params
- if isinstance(filename, (rasterio.io.DatasetReader, SingleBandDatasetReader)):
- filename = filename.name
- elif isinstance(filename, rasterio.vrt.WarpedVRT):
- vrt = filename
- filename = vrt.src_dataset.name
- vrt_params = {
- "src_crs": vrt.src_crs.to_string() if vrt.src_crs else None,
- "crs": vrt.dst_crs.to_string() if vrt.dst_crs else None,
- "resampling": vrt.resampling,
- "tolerance": vrt.tolerance,
- "src_nodata": vrt.src_nodata,
- "nodata": vrt.dst_nodata,
- "width": vrt.dst_width,
- "height": vrt.dst_height,
- "src_transform": vrt.src_transform,
- "transform": vrt.dst_transform,
- "dtype": vrt.working_dtype,
- **vrt.warp_extras,
- }
-
- if lock in (True, None):
- lock = RASTERIO_LOCK
- elif lock is False:
- lock = NO_LOCK
-
- # ensure default for sharing is False
- # ref https://github.com/mapbox/rasterio/issues/1504
- open_kwargs["sharing"] = open_kwargs.get("sharing", False)
-
- with warnings.catch_warnings(record=True) as rio_warnings:
- if lock is not NO_LOCK and isinstance(filename, (str, os.PathLike)):
- manager: FileManager = CachingFileManager(
- file_opener, filename, lock=lock, mode="r", kwargs=open_kwargs
- )
- else:
- manager = URIManager(file_opener, filename, mode="r", kwargs=open_kwargs)
- riods = manager.acquire()
- captured_warnings = rio_warnings.copy()
-
- # raise the NotGeoreferencedWarning if applicable
- for rio_warning in captured_warnings:
- if not riods.subdatasets or not isinstance(
- rio_warning.message, NotGeoreferencedWarning
- ):
- warnings.warn(str(rio_warning.message), type(rio_warning.message)) # type: ignore
-
- # open the subdatasets if they exist
- if riods.subdatasets:
- subdataset_result = _load_subdatasets(
- riods=riods,
- group=group,
- variable=variable,
- parse_coordinates=parse_coordinates,
- chunks=chunks,
- cache=cache,
- lock=lock,
- masked=masked,
- mask_and_scale=mask_and_scale,
- decode_times=decode_times,
- decode_timedelta=decode_timedelta,
- **open_kwargs,
- )
- manager.close()
- return subdataset_result
-
- if band_as_variable:
- dataset_result = _load_bands_as_variables(
- riods=riods,
- parse_coordinates=parse_coordinates,
- chunks=chunks,
- cache=cache,
- lock=lock,
- masked=masked,
- mask_and_scale=mask_and_scale,
- decode_times=decode_times,
- decode_timedelta=decode_timedelta,
- vrt_params=vrt_params,
- **open_kwargs,
- )
- manager.close()
- return dataset_result
-
- if cache is None:
- cache = chunks is None
-
- riods = _ensure_warped_vrt(riods, vrt_params)
-
- # Get bands
- if riods.count < 1:
- raise ValueError("Unknown dims")
-
- # parse tags & load alternate coords
- attrs = _get_rasterio_attrs(riods=riods)
- coords = _load_netcdf_1d_coords(attrs)
- _parse_driver_tags(riods=riods, attrs=attrs, coords=coords)
- for coord in coords:
- if f"NETCDF_DIM_{coord}" in attrs:
- coord_name = coord
- attrs.pop(f"NETCDF_DIM_{coord}")
- break
- if f"NETCDF_DIM_{coord}_VALUES" in attrs:
- coord_name = coord
- attrs.pop(f"NETCDF_DIM_{coord}_VALUES")
- attrs.pop(f"NETCDF_DIM_{coord}_DEF", None)
- attrs.pop("NETCDF_DIM_EXTRA", None)
- break
- else:
- coord_name = "band"
- coords[coord_name] = numpy.asarray(riods.indexes)
-
- has_gcps = riods.gcps[0]
- if has_gcps:
- parse_coordinates = False
-
- # Get geospatial coordinates
- if parse_coordinates:
- coords.update(
- _generate_spatial_coords(riods.transform, riods.width, riods.height)
- )
-
- unsigned = None
- encoding: dict[Hashable, Any] = {}
- if mask_and_scale and "_Unsigned" in attrs:
- unsigned = variables.pop_to(attrs, encoding, "_Unsigned") == "true"
-
- if masked:
- encoding["dtype"] = str(_rasterio_to_numpy_dtype(riods.dtypes))
-
- da_name = attrs.pop("NETCDF_VARNAME", default_name)
- data: Any = indexing.LazilyOuterIndexedArray(
- RasterioArrayWrapper(
- manager,
- lock,
- name=da_name,
- vrt_params=vrt_params,
- masked=masked,
- mask_and_scale=mask_and_scale,
- unsigned=unsigned,
- )
- )
-
- # this lets you write arrays loaded with rasterio
- data = indexing.CopyOnWriteArray(data)
- if cache and chunks is None:
- data = indexing.MemoryCachedArray(data)
-
- result = DataArray(
- data=data, dims=(coord_name, "y", "x"), coords=coords, attrs=attrs, name=da_name
- )
- result.encoding = encoding
-
- # update attributes from NetCDF attributes
- _load_netcdf_attrs(riods.tags(), result)
- result = _decode_datetime_cf(
- result, decode_times=decode_times, decode_timedelta=decode_timedelta
- )
-
- # make sure the _FillValue is correct dtype
- if "_FillValue" in result.attrs:
- result.attrs["_FillValue"] = result.dtype.type(result.attrs["_FillValue"])
-
- # handle encoding
- _handle_encoding(result, mask_and_scale, masked, da_name, unsigned=unsigned)
- # Affine transformation matrix (always available)
- # This describes coefficients mapping pixel coordinates to CRS
- # For serialization store as tuple of 6 floats, the last row being
- # always (0, 0, 1) per definition (see
- # https://github.com/sgillies/affine)
- result.rio.write_transform(riods.transform, inplace=True)
- rio_crs = riods.crs or result.rio.crs
- if rio_crs:
- result.rio.write_crs(rio_crs, inplace=True)
- if has_gcps:
- result.rio.write_gcps(*riods.gcps, inplace=True)
-
- if chunks is not None:
- result = _prepare_dask(result, riods, filename, chunks)
- else:
- result.encoding["preferred_chunks"] = {
- result.rio.y_dim: riods.block_shapes[0][0],
- result.rio.x_dim: riods.block_shapes[0][1],
- coord_name: 1,
- }
-
- # add file path to encoding
- result.encoding["source"] = riods.name
- result.encoding["rasterio_dtype"] = str(riods.dtypes[0])
- # remove duplicate coordinate information
- for coord in result.coords:
- result.attrs = {
- attr: value
- for attr, value in result.attrs.items()
- if not attr.startswith(f"{coord}#")
- }
- # remove duplicate tags
- if result.name:
- result.attrs = {
- attr: value
- for attr, value in result.attrs.items()
- if not attr.startswith(f"{result.name}#")
- }
- # Make the file closeable
- result.set_close(manager.close)
- result.rio._manager = manager
- return result
-
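# A minimal usage sketch for open_rasterio above (not part of the module); the
# GeoTIFF path is hypothetical and the dask chunk sizes are arbitrary.
import rioxarray
xda = rioxarray.open_rasterio("example.tif", masked=True, chunks={"x": 512, "y": 512})
print(xda.rio.crs, xda.rio.nodata, xda.rio.transform())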
-"""
-This file contains global options for rioxarray
-
-Credits:
-
-This file was adapted from: https://github.com/pydata/xarray # noqa
-Source file: https://github.com/pydata/xarray/blob/2ab0666c1fcc493b1e0ebc7db14500c427f8804e/xarray/core/options.py # noqa
-"""
-from typing import Any
-
-EXPORT_GRID_MAPPING = "export_grid_mapping"
-SKIP_MISSING_SPATIAL_DIMS = "skip_missing_spatial_dims"
-
-OPTIONS = {
- EXPORT_GRID_MAPPING: True,
- SKIP_MISSING_SPATIAL_DIMS: False,
-}
-OPTION_NAMES = set(OPTIONS)
-
-VALIDATORS = {
- EXPORT_GRID_MAPPING: lambda choice: isinstance(choice, bool),
-}
-
-
-def get_option(key: str) -> Any:
- """
- Get the global rioxarray option.
-
- .. versionadded:: 0.3.0
-
- Parameters
- ----------
- key: str
- The name of the option.
-
- Returns
- -------
- Any: the value of the option.
- """
- return OPTIONS[key]
-
-
-class set_options: # pylint: disable=invalid-name
- """
- Set the global rioxarray option.
-
- .. versionadded:: 0.3.0
- .. versionadded:: 0.7.0 skip_missing_spatial_dims
-
- Parameters
- ----------
- export_grid_mapping: bool, default=True
- If True, this option will export the full Climate and Forecasts (CF)
- grid mapping attributes for the CRS. This is useful if you are exporting
- your file to netCDF using :meth:`xarray.Dataset.to_netcdf()`. When disabled,
- only the ``crs_wkt`` and ``spatial_ref`` attributes will be written and the
- program will be faster due to not needing to use
- :meth:`pyproj.CRS.to_cf() <pyproj.crs.CRS.to_cf>`.
- skip_missing_spatial_dims: bool, default=False
- If True, it will not perform spatial operations on variables
- within a :class:`xarray.Dataset` if the spatial dimensions
- are not found.
-
-
- Usage as a context manager::
-
- with rioxarray.set_options(export_grid_mapping=False):
- rds = rioxarray.open_rasterio(...)
-
- Usage for global settings::
-
- rioxarray.set_options(export_grid_mapping=False)
-
- """
-
- def __init__(self, **kwargs):
- self.old = OPTIONS.copy()
- for key, value in kwargs.items():
- if key not in OPTIONS:
- raise ValueError(
- f"argument name {key} is not in the set of valid options "
- f"{OPTION_NAMES}."
- )
- if key in VALIDATORS and not VALIDATORS[key](value):
- raise ValueError(f"option {key!r} gave an invalid value: {value!r}.")
- OPTIONS[key] = value
-
- def __enter__(self):
- return
-
- def __exit__(self, exc_type, exc_value, traceback):
- global OPTIONS # pylint: disable=global-statement
- OPTIONS = self.old
-
-"""
-Utility methods to print system info for debugging
-
-adapted from :func:`sklearn.utils._show_versions`
-which was adapted from :func:`pandas.show_versions`
-"""
-# pylint: disable=import-outside-toplevel
-import importlib.metadata
-import os
-import platform
-import sys
-
-
-def _get_sys_info() -> dict[str, str]:
- """System information
- Returns
- -------
- sys_info : dict
- system and Python version information
- """
- blob = [
- ("python", sys.version.replace("\n", " ")),
- ("executable", sys.executable),
- ("machine", platform.platform()),
- ]
-
- return dict(blob)
-
-
-def _get_main_info() -> dict[str, str]:
- """Get the main dependency information to hightlight.
-
- Returns
- -------
- main_info: dict
- version information for rasterio, xarray, and GDAL/GEOS/PROJ
- """
- import rasterio
-
- try:
- proj_data = os.pathsep.join(rasterio._env.get_proj_data_search_paths())
- except AttributeError:
- proj_data = None
- try:
- gdal_data = rasterio._env.get_gdal_data()
- except AttributeError:
- gdal_data = None
-
- blob = [
- ("rasterio", importlib.metadata.version("rasterio")),
- ("xarray", importlib.metadata.version("xarray")),
- ("GDAL", rasterio.__gdal_version__),
- ("GEOS", getattr(rasterio, "__geos_version__", None)),
- ("PROJ", getattr(rasterio, "__proj_version__", None)),
- ("PROJ DATA", proj_data),
- ("GDAL DATA", gdal_data),
- ]
-
- return dict(blob)
-
-
-def _get_deps_info() -> dict[str, str]:
- """Overview of the installed version of dependencies
- Returns
- -------
- deps_info: dict
- version information on relevant Python libraries
- """
- deps = ["scipy", "pyproj"]
-
- def get_version(module):
- try:
- return importlib.metadata.version(module)
- except importlib.metadata.PackageNotFoundError:
- return None
-
- return {dep: get_version(dep) for dep in deps}
-
-
-def _print_info_dict(info_dict: dict[str, str]) -> None:
- """Print the information dictionary"""
- for key, stat in info_dict.items():
- print(f"{key:>10}: {stat}")
-
-
-def show_versions() -> None:
- """
- .. versionadded:: 0.0.26
-
- Print useful debugging information
-
- Example
- -------
- > python -c "import rioxarray; rioxarray.show_versions()"
-
- """
- print(f"rioxarray ({importlib.metadata.version('rioxarray')}) deps:")
- _print_info_dict(_get_main_info())
- print("\nOther python deps:")
- _print_info_dict(_get_deps_info())
- print("\nSystem:")
- _print_info_dict(_get_sys_info())
-
-"""
-This contains exceptions for rioxarray.
-"""
-
-
-class RioXarrayError(RuntimeError):
- """This is the base exception for errors in the rioxarray extension."""
-
-
-class NoDataInBounds(RioXarrayError):
- """This is for when there are no data in the bounds for clipping a raster."""
-
-
-class SingleVariableDataset(RioXarrayError):
- """This is for when you have a dataset with a single variable."""
-
-
-class DimensionError(RioXarrayError):
- """This is raised when there are more dimensions than is supported by the method"""
-
-
-class MissingSpatialDimensionError(DimensionError):
- """This is raised when the dimension cannot be found"""
-
-
-class TooManyDimensions(DimensionError):
- """This is raised when there are more dimensions than is supported by the method"""
-
-
-class InvalidDimensionOrder(DimensionError):
- """This is raised when there the dimensions are not ordered correctly."""
-
-
-class OneDimensionalRaster(DimensionError):
- """This is an error when you have a 1 dimensional raster."""
-
-
-class DimensionMissingCoordinateError(RioXarrayError):
- """This is raised when the dimension does not have the supporting coordinate."""
-
-
-
-
-"""
-This module allows you to merge xarray Datasets/DataArrays
-geospatially with the `rasterio.merge` module.
-"""
-
-from collections.abc import Sequence
-from typing import Callable, Optional, Union
-
-import numpy
-from rasterio.crs import CRS
-from rasterio.merge import merge as _rio_merge
-from xarray import DataArray, Dataset
-
-from rioxarray.rioxarray import _get_nonspatial_coords, _make_coords
-
-
-class RasterioDatasetDuck:
- """
- This class is to provide the attributes and methods necessary
- to make the :func:`rasterio.merge.merge` function think that
- the :obj:`xarray.DataArray` is a :obj:`rasterio.io.DatasetReader`.
- """
-
- # pylint: disable=too-many-instance-attributes
-
- def __init__(self, xds: DataArray):
- self._xds = xds
- self.crs = xds.rio.crs
- self.bounds = xds.rio.bounds(recalc=True)
- self.count = int(xds.rio.count)
- self.dtypes = [xds.dtype]
- self.name = xds.name
- self.nodatavals = [xds.rio.nodata]
- res = xds.rio.resolution(recalc=True)
- self.res = (abs(res[0]), abs(res[1]))
- self.transform = xds.rio.transform(recalc=True)
- try:
- rio_file = xds.rio._manager.acquire()
- self.profile = rio_file.profile
- except AttributeError:
- self.profile = {}
- self.profile.update(
- dtype=xds.dtype,
- crs=xds.rio.crs,
- nodata=xds.rio.nodata,
- )
-
- def colormap(self, *args, **kwargs):
- """
- Lazy load colormap through _manager.acquire()
- for the scenario where many file handles are opened
-
- See: https://github.com/corteva/rioxarray/issues/479
- """
- try:
- rio_file = self._xds.rio._manager.acquire()
- return rio_file.colormap(*args, **kwargs)
- except AttributeError:
- return None
-
- def read(self, window, out_shape, *args, **kwargs) -> numpy.ma.MaskedArray:
- # pylint: disable=unused-argument
- """
- This method is meant to be used by the rasterio.merge.merge function.
- """
- data_window = self._xds.rio.isel_window(window)
- if data_window.shape != out_shape:
- # in this section, the data is geographically the same
- # however it is not the same dimensions as requested
- # so need to resample to the requested shape
- if len(out_shape) == 3:
- _, out_height, out_width = out_shape
- else:
- out_height, out_width = out_shape
- data_window = self._xds.rio.reproject(
- self._xds.rio.crs,
- transform=self.transform,
- shape=(out_height, out_width),
- )
-
- nodata = self.nodatavals[0]
- mask = False
- fill_value = None
- if nodata is not None and numpy.isnan(nodata):
- mask = numpy.isnan(data_window)
- elif nodata is not None:
- mask = data_window == nodata
- fill_value = nodata
-
- # make sure the returned shape matches
- # the expected shape. This can be the case
- # when the xarray dataset was squeezed to 2D beforehand
- if len(out_shape) == 3 and len(data_window.shape) == 2:
- data_window = data_window.values.reshape((1, out_height, out_width))
-
- return numpy.ma.array(
- data_window, mask=mask, fill_value=fill_value, dtype=self.dtypes[0]
- )
-
-
-def merge_arrays(
- dataarrays: Sequence[DataArray],
- bounds: Optional[tuple] = None,
- res: Optional[tuple] = None,
- nodata: Optional[float] = None,
- precision: Optional[float] = None,
- method: Union[str, Callable, None] = None,
- crs: Optional[CRS] = None,
- parse_coordinates: bool = True,
-) -> DataArray:
- """
- Merge data arrays geospatially.
-
- Uses :func:`rasterio.merge.merge`
-
- .. versionadded:: 0.2 crs
-
- Parameters
- ----------
- dataarrays: list[xarray.DataArray]
- List of xarray.DataArray's with all geo attributes.
- The first one is assumed to have the same
- CRS, dtype, and dimensions as the others in the array.
- bounds: tuple, optional
- Bounds of the output image (left, bottom, right, top).
- If not set, bounds are determined from bounds of input DataArrays.
- res: tuple, optional
- Output resolution in units of coordinate reference system.
- If not set, the resolution of the first DataArray is used.
- If a single value is passed, output pixels will be square.
- nodata: float, optional
- nodata value to use in output file.
- If not set, uses the nodata value in the first input DataArray.
- precision: float, optional
- Number of decimal points of precision when computing inverse transform.
- method: str or callable, optional
- See :func:`rasterio.merge.merge` for details.
- crs: rasterio.crs.CRS, optional
- Output CRS. If not set, the CRS of the first DataArray is used.
- parse_coordinates: bool, optional
- If False, it will disable loading spatial coordinates.
-
- Returns
- -------
- :obj:`xarray.DataArray`:
- The geospatially merged data.
- """
- input_kwargs = {
- "bounds": bounds,
- "res": res,
- "nodata": nodata,
- "precision": precision,
- "method": method,
- }
-
- if crs is None:
- crs = dataarrays[0].rio.crs
- if res is None:
- res = tuple(abs(res_val) for res_val in dataarrays[0].rio.resolution())
-
- # prepare the duck arrays
- rioduckarrays = []
- for dataarray in dataarrays:
- da_res = tuple(abs(res_val) for res_val in dataarray.rio.resolution())
- if da_res != res or dataarray.rio.crs != crs:
- rioduckarrays.append(
- RasterioDatasetDuck(
- dataarray.rio.reproject(dst_crs=crs, resolution=res)
- )
- )
- else:
- rioduckarrays.append(RasterioDatasetDuck(dataarray))
-
- # use rasterio to merge
- merged_data, merged_transform = _rio_merge(
- rioduckarrays,
- **{key: val for key, val in input_kwargs.items() if val is not None},
- )
- # generate merged data array
- representative_array = rioduckarrays[0]._xds
- if parse_coordinates:
- coords = _make_coords(
- representative_array,
- merged_transform,
- merged_data.shape[-1],
- merged_data.shape[-2],
- )
- else:
- coords = _get_nonspatial_coords(representative_array)
-
- # make sure the output merged data shape is 2D if the
- # original data was 2D. this can happen if the
- # xarray DataArray was squeezed.
- if len(merged_data.shape) == 3 and len(representative_array.shape) == 2:
- merged_data = merged_data.squeeze()
-
- xda = DataArray(
- name=representative_array.name,
- data=merged_data,
- coords=coords,
- dims=tuple(representative_array.dims),
- attrs=representative_array.attrs,
- )
- xda.rio.write_nodata(
- nodata if nodata is not None else representative_array.rio.nodata, inplace=True
- )
- xda.rio.write_crs(representative_array.rio.crs, inplace=True)
- xda.rio.write_transform(merged_transform, inplace=True)
- return xda
-
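# Usage sketch for merge_arrays above (not part of the module): "tile_a.tif" and
# "tile_b.tif" are hypothetical adjacent rasters that share a CRS and dtype.
import rioxarray
from rioxarray.merge import merge_arrays
tiles = [rioxarray.open_rasterio(path, masked=True) for path in ("tile_a.tif", "tile_b.tif")]
merged = merge_arrays(tiles)
merged.rio.to_raster("merged.tif")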
-
-def merge_datasets(
- datasets: Sequence[Dataset],
- bounds: Optional[tuple] = None,
- res: Optional[tuple] = None,
- nodata: Optional[float] = None,
- precision: Optional[float] = None,
- method: Union[str, Callable, None] = None,
- crs: Optional[CRS] = None,
-) -> Dataset:
- """
- Merge datasets geospatially.
-
- Uses :func:`rasterio.merge.merge`
-
- .. versionadded:: 0.2 crs
-
- Parameters
- ----------
- datasets: list[xarray.Dataset]
- List of xarray.Dataset's with all geo attributes.
- The first one is assumed to have the same
- CRS, dtype, dimensions, and data_vars as the others in the array.
- bounds: tuple, optional
- Bounds of the output image (left, bottom, right, top).
- If not set, bounds are determined from the bounds of the input Datasets.
- res: tuple, optional
- Output resolution in units of coordinate reference system.
- If not set, the resolution of the first Dataset is used.
- If a single value is passed, output pixels will be square.
- nodata: float, optional
- Nodata value to use in the output file.
- If not set, uses the nodata value in the first input Dataset.
- precision: float, optional
- Number of decimal points of precision when computing inverse transform.
- method: str or callable, optional
- See :func:`rasterio.merge.merge` for details.
- crs: rasterio.crs.CRS, optional
- Output CRS. If not set, the CRS of the first Dataset is used.
-
- Returns
- -------
- :obj:`xarray.Dataset`:
- The geospatially merged data.
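-
- Examples
- --------
- A minimal sketch, assuming two overlapping datasets ``ds_west`` and ``ds_east``
- (hypothetical names) that share CRS, dtype, and data_vars:
-
- >>> from rioxarray.merge import merge_datasets
- >>> merged = merge_datasets([ds_west, ds_east])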
- """
-
- representative_ds = datasets[0]
- merged_data = {}
- for data_var in representative_ds.data_vars:
- merged_data[data_var] = merge_arrays(
- [dataset[data_var] for dataset in datasets],
- bounds=bounds,
- res=res,
- nodata=nodata,
- precision=precision,
- method=method,
- crs=crs,
- parse_coordinates=False,
- )
- data_var = list(representative_ds.data_vars)[0]
- xds = Dataset(
- merged_data,
- coords=_make_coords(
- merged_data[data_var],
- merged_data[data_var].rio.transform(),
- merged_data[data_var].shape[-1],
- merged_data[data_var].shape[-2],
- force_generate=True,
- ),
- attrs=representative_ds.attrs,
- )
- xds.rio.write_crs(merged_data[data_var].rio.crs, inplace=True)
- return xds
-
-"""
-This module is an extension for xarray to provide rasterio capabilities
-to xarray dataarrays.
-
-Credits: The `reproject` functionality was adopted from https://github.com/opendatacube/datacube-core # noqa: E501
-Source file:
-- https://github.com/opendatacube/datacube-core/blob/084c84d78cb6e1326c7fbbe79c5b5d0bef37c078/datacube/api/geo_xarray.py # noqa: E501
-datacube is licensed under the Apache License, Version 2.0:
-- https://github.com/opendatacube/datacube-core/blob/1d345f08a10a13c316f81100936b0ad8b1a374eb/LICENSE # noqa: E501
-
-"""
-import copy
-import os
-from collections.abc import Hashable, Iterable, Mapping
-from pathlib import Path
-from typing import Any, Literal, Optional, Union
-
-import numpy
-import rasterio
-import rasterio.mask
-import rasterio.warp
-import xarray
-from affine import Affine
-from rasterio.dtypes import dtype_rev
-from rasterio.enums import Resampling
-from rasterio.features import geometry_mask
-from xarray.backends.file_manager import FileManager
-from xarray.core.dtypes import get_fill_value
-
-from rioxarray._io import FILL_VALUE_NAMES, UNWANTED_RIO_ATTRS
-from rioxarray.crs import crs_from_user_input
-from rioxarray.exceptions import (
- MissingCRS,
- NoDataInBounds,
- OneDimensionalRaster,
- RioXarrayError,
-)
-from rioxarray.raster_writer import RasterioWriter, _ensure_nodata_dtype
-from rioxarray.rioxarray import (
- XRasterBase,
- _get_data_var_message,
- _make_coords,
- _order_bounds,
-)
-
-# DTYPE TO NODATA MAP
-# Based on: https://github.com/OSGeo/gdal/blob/
-# dee861e7c91c2da7ef8ff849947713e4d9bd115c/
-# swig/python/gdal-utils/osgeo_utils/gdal_calc.py#L61
-_NODATA_DTYPE_MAP = {
- 1: 255, # GDT_Byte
- 2: 65535, # GDT_UInt16
- 3: -32768, # GDT_Int16
- 4: 4294967293, # GDT_UInt32
- 5: -2147483647, # GDT_Int32
- 6: 3.402823466e38, # GDT_Float32
- 7: 1.7976931348623158e308, # GDT_Float64
- 8: None, # GDT_CInt16
- 9: None, # GDT_CInt32
- 10: 3.402823466e38, # GDT_CFloat32
- 11: 1.7976931348623158e308, # GDT_CFloat64
- 12: None, # GDT_Int64
- 13: None, # GDT_UInt64
- 14: None, # GDT_Int8
-}
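-
- # Example (illustrative sketch): the default nodata for a dtype can be
- # looked up through rasterio's dtype_rev mapping, as _get_dst_nodata does below:
- #   _NODATA_DTYPE_MAP[dtype_rev["float32"]]  # -> 3.402823466e38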
-
-
-def _generate_attrs(
- src_data_array: xarray.DataArray, dst_nodata: Optional[float]
-) -> dict[str, Any]:
- # add original attributes
- new_attrs = copy.deepcopy(src_data_array.attrs)
- # remove all nodata information
- for unwanted_attr in FILL_VALUE_NAMES + UNWANTED_RIO_ATTRS:
- new_attrs.pop(unwanted_attr, None)
-
- # add nodata information
- fill_value = (
- src_data_array.rio.nodata
- if src_data_array.rio.nodata is not None
- else dst_nodata
- )
- if src_data_array.rio.encoded_nodata is None and fill_value is not None:
- new_attrs["_FillValue"] = fill_value
-
- return new_attrs
-
-
-def _add_attrs_proj(
- new_data_array: xarray.DataArray, src_data_array: xarray.DataArray
-) -> xarray.DataArray:
- """Make sure attributes and projection correct"""
- # make sure dimension information is preserved
- if new_data_array.rio._x_dim is None:
- new_data_array.rio._x_dim = src_data_array.rio.x_dim
- if new_data_array.rio._y_dim is None:
- new_data_array.rio._y_dim = src_data_array.rio.y_dim
-
- # make sure attributes preserved
- new_attrs = _generate_attrs(src_data_array, None)
- # remove fill value if it already exists in the encoding
- # this is for data arrays pulling the encoding from a
- # source data array instead of being generated anew.
- if "_FillValue" in new_data_array.encoding:
- new_attrs.pop("_FillValue", None)
-
- new_data_array.rio.set_attrs(new_attrs, inplace=True)
-
- # make sure projection added
- new_data_array.rio.write_grid_mapping(src_data_array.rio.grid_mapping, inplace=True)
- new_data_array.rio.write_crs(src_data_array.rio.crs, inplace=True)
- new_data_array.rio.write_coordinate_system(inplace=True)
- new_data_array.rio.write_transform(inplace=True)
- # make sure encoding added
- new_data_array.encoding = src_data_array.encoding.copy()
- return new_data_array
-
-
-def _make_dst_affine(
- src_data_array: xarray.DataArray,
- src_crs: rasterio.crs.CRS,
- dst_crs: rasterio.crs.CRS,
- dst_resolution: Optional[Union[float, tuple[float, float]]] = None,
- dst_shape: Optional[tuple[float, float]] = None,
- **kwargs,
-):
- """Determine the affine of the new projected `xarray.DataArray`"""
- src_bounds = () if "gcps" in kwargs else src_data_array.rio.bounds()
- src_height, src_width = src_data_array.rio.shape
- dst_height, dst_width = dst_shape if dst_shape is not None else (None, None)
- # pylint: disable=isinstance-second-argument-not-valid-type
- if isinstance(dst_resolution, Iterable):
- dst_resolution = tuple(abs(res_val) for res_val in dst_resolution) # type: ignore
- elif dst_resolution is not None:
- dst_resolution = abs(dst_resolution) # type: ignore
-
- for key, value in (
- ("resolution", dst_resolution),
- ("dst_height", dst_height),
- ("dst_width", dst_width),
- ):
- if value is not None:
- kwargs[key] = value
- dst_affine, dst_width, dst_height = rasterio.warp.calculate_default_transform(
- src_crs,
- dst_crs,
- src_width,
- src_height,
- *src_bounds,
- **kwargs,
- )
- return dst_affine, dst_width, dst_height
-
-
-def _clip_from_disk(
- xds: xarray.DataArray,
- geometries: Iterable,
- all_touched: bool,
- drop: bool,
- invert: bool,
-) -> Optional[xarray.DataArray]:
- """
- clip from disk if the file object is available
- """
- try:
- out_image, out_transform = rasterio.mask.mask(
- xds.rio._manager.acquire(),
- geometries,
- all_touched=all_touched,
- invert=invert,
- crop=drop,
- )
- if xds.rio.encoded_nodata is not None and not numpy.isnan(
- xds.rio.encoded_nodata
- ):
- out_image = out_image.astype(numpy.float64)
- out_image[out_image == xds.rio.encoded_nodata] = numpy.nan
-
- height, width = out_image.shape[-2:]
- cropped_ds = xarray.DataArray(
- name=xds.name,
- data=out_image,
- coords=_make_coords(xds, out_transform, width, height),
- dims=xds.dims,
- attrs=xds.attrs,
- )
- cropped_ds.encoding = xds.encoding
- return cropped_ds
- except AttributeError:
- return None
-
-
-def _clip_xarray(
- xds: xarray.DataArray,
- geometries: Iterable,
- all_touched: bool,
- drop: bool,
- invert: bool,
-) -> xarray.DataArray:
- """
- clip the xarray DataArray
- """
- clip_mask_arr = geometry_mask(
- geometries=geometries,
- out_shape=(int(xds.rio.height), int(xds.rio.width)),
- transform=xds.rio.transform(recalc=True),
- invert=not invert,
- all_touched=all_touched,
- )
- clip_mask_xray = xarray.DataArray(
- clip_mask_arr,
- dims=(xds.rio.y_dim, xds.rio.x_dim),
- )
- cropped_ds = xds.where(clip_mask_xray)
- if drop:
- cropped_ds.rio.set_spatial_dims(
- x_dim=xds.rio.x_dim, y_dim=xds.rio.y_dim, inplace=True
- )
- cropped_ds = cropped_ds.rio.isel_window(
- rasterio.windows.get_data_window(
- numpy.ma.masked_array(clip_mask_arr, ~clip_mask_arr)
- )
- )
- if xds.rio.nodata is not None and not numpy.isnan(xds.rio.nodata):
- cropped_ds = cropped_ds.fillna(xds.rio.nodata)
-
- return cropped_ds.astype(xds.dtype)
-
-
-[docs]@xarray.register_dataarray_accessor("rio")
-class RasterArray(XRasterBase):
- """This is the GIS extension for :obj:`xarray.DataArray`"""
-
- def __init__(self, xarray_obj: xarray.DataArray):
- super().__init__(xarray_obj)
- self._obj: xarray.DataArray
- # properties
- self._nodata: Optional[float] = None
- self._manager: Optional[
- FileManager
- ] = None # https://github.com/corteva/rioxarray/issues/254
-
-[docs] def set_nodata(
- self, input_nodata: Optional[float], inplace: bool = True
- ) -> xarray.DataArray:
- """
- Set the nodata value for the DataArray without modifying
- the data array.
-
- Parameters
- ----------
- input_nodata: Optional[float]
- Valid nodata for dtype.
- inplace: bool, optional
- If True, it will write to the existing dataset. Default is True.
-
- Returns
- -------
- :obj:`xarray.DataArray`:
- DataArray with nodata attribute set.
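-
- Examples
- --------
- A minimal sketch (``raster`` is a hypothetical xarray.DataArray opened with rioxarray):
-
- >>> raster = raster.rio.set_nodata(-9999, inplace=False)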
- """
- obj: xarray.DataArray = self._get_obj(inplace=inplace) # type: ignore
- obj.rio._nodata = input_nodata
- return obj
-
-[docs] def write_nodata(
- self, input_nodata: Optional[float], encoded: bool = False, inplace=False
- ) -> xarray.DataArray:
- """
- Write the nodata to the DataArray in a CF compliant manner.
-
- Parameters
- ----------
- input_nodata: Optional[float]
- Nodata value for the DataArray.
- If input_nodata is None, it will remove the _FillValue attribute.
- encoded: bool, optional
- If True, it will write the nodata value in the encoding and remove
- the fill value from the attributes. This is useful for masking
- with nodata. Default is False.
- inplace: bool, optional
- If True, it will write to the existing DataArray. Default is False.
-
- Returns
- -------
- :obj:`xarray.DataArray`:
- Modified DataArray with CF compliant nodata information.
-
- Examples
- --------
- To write the nodata value if it is missing:
-
- >>> raster.rio.write_nodata(-9999, inplace=True)
-
- To write the nodata value on a copy:
-
- >>> raster = raster.rio.write_nodata(-9999)
-
- To mask with nodata:
-
- >>> nodata = raster.rio.nodata
- >>> raster = raster.where(raster != nodata)
- >>> raster.rio.write_nodata(nodata, encoded=True, inplace=True)
-
- """
- data_obj: xarray.DataArray = self._get_obj(inplace=inplace) # type: ignore
- input_nodata = False if input_nodata is None else input_nodata
- if input_nodata is not False:
- input_nodata = _ensure_nodata_dtype(input_nodata, self._obj.dtype)
- if encoded:
- data_obj.rio.update_encoding({"_FillValue": input_nodata}, inplace=True)
- else:
- data_obj.rio.update_attrs({"_FillValue": input_nodata}, inplace=True)
- if input_nodata is False or encoded:
- new_attrs = dict(data_obj.attrs)
- new_attrs.pop("_FillValue", None)
- data_obj.rio.set_attrs(new_attrs, inplace=True)
- if input_nodata is False and encoded:
- new_encoding = dict(data_obj.encoding)
- new_encoding.pop("_FillValue", None)
- data_obj.rio.set_encoding(new_encoding, inplace=True)
- if not encoded:
- data_obj.rio.set_nodata(input_nodata, inplace=True)
- return data_obj
-
- @property
- def encoded_nodata(self) -> Optional[float]:
- """Return the encoded nodata value for the dataset if encoded."""
- encoded_nodata = self._obj.encoding.get("_FillValue")
- if encoded_nodata is None:
- return None
- return _ensure_nodata_dtype(encoded_nodata, self._obj.dtype)
-
- @property
- def nodata(self) -> Optional[float]:
- """Get the nodata value for the dataset."""
- if self._nodata is not None:
- return None if self._nodata is False else self._nodata
-
- if self.encoded_nodata is not None:
- self._nodata = get_fill_value(self._obj.dtype)
- else:
- self._nodata = self._obj.attrs.get(
- "_FillValue",
- self._obj.attrs.get(
- "missing_value",
- self._obj.attrs.get("fill_value", self._obj.attrs.get("nodata")),
- ),
- )
-
- # look in places used by `xarray.open_rasterio`
- if self._nodata is None:
- try:
- self._nodata = self._manager.acquire().nodata # type: ignore
- except AttributeError:
- try:
- self._nodata = self._obj.attrs["nodatavals"][0]
- except (KeyError, IndexError):
- pass
-
- if self._nodata is None:
- self._nodata = False
- return None
-
- self._nodata = _ensure_nodata_dtype(self._nodata, self._obj.dtype)
- return self._nodata
-
-[docs] def reproject(
- self,
- dst_crs: Any,
- resolution: Optional[Union[float, tuple[float, float]]] = None,
- shape: Optional[tuple[int, int]] = None,
- transform: Optional[Affine] = None,
- resampling: Resampling = Resampling.nearest,
- nodata: Optional[float] = None,
- **kwargs,
- ) -> xarray.DataArray:
- """
- Reproject :obj:`xarray.DataArray` objects
-
- Powered by :func:`rasterio.warp.reproject`
-
- .. note:: Only 2D/3D arrays with dimensions 'x'/'y' are currently supported.
- Requires either a grid mapping variable with 'spatial_ref' or
- a 'crs' attribute to be set containing a valid CRS.
- If using a WKT (e.g. from spatialreference.org), make sure it is an OGC WKT.
-
- .. note:: To re-project with dask, see
- `odc-geo <https://odc-geo.readthedocs.io/>`__ &
- `pyresample <https://pyresample.readthedocs.io/>`__.
-
- .. versionadded:: 0.0.27 shape
- .. versionadded:: 0.0.28 transform
- .. versionadded:: 0.5.0 nodata, kwargs
-
- Parameters
- ----------
- dst_crs: str
- OGC WKT string or Proj.4 string.
- resolution: float or tuple(float, float), optional
- Size of a destination pixel in destination projection units
- (e.g. degrees or metres).
- shape: tuple(int, int), optional
- Shape of the destination in pixels (dst_height, dst_width). Cannot be used
- together with resolution.
- transform: Affine, optional
- The destination transform.
- resampling: rasterio.enums.Resampling, optional
- See :func:`rasterio.warp.reproject` for more details.
- nodata: float, optional
- The nodata value used to initialize the destination;
- it will remain in all areas not covered by the reprojected source.
- If not provided, defaults to the nodata value of the source image
- if it exists; otherwise, an appropriate value is chosen based on dtype.
- **kwargs: dict
- Additional keyword arguments to pass into :func:`rasterio.warp.reproject`.
- To override:
- - src_transform: `rio.write_transform`
- - src_crs: `rio.write_crs`
- - src_nodata: `rio.write_nodata`
-
-
- Returns
- -------
- :obj:`xarray.DataArray`:
- The reprojected DataArray.
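-
- Examples
- --------
- A minimal sketch (``raster`` is a hypothetical xarray.DataArray with a CRS
- already written via ``rio.write_crs``):
-
- >>> from rasterio.enums import Resampling
- >>> reprojected = raster.rio.reproject("EPSG:4326", resampling=Resampling.bilinear)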
- """
- if resolution is not None and (shape is not None or transform is not None):
- raise RioXarrayError("resolution cannot be used with shape or transform.")
- if self.crs is None:
- raise MissingCRS(
- "CRS not found. Please set the CRS with 'rio.write_crs()'."
- f"{_get_data_var_message(self._obj)}"
- )
- gcps = self.get_gcps()
- if gcps:
- kwargs.setdefault("gcps", gcps)
-
- gcps_or_rpcs = "gcps" in kwargs or "rpcs" in kwargs
- src_affine = None if gcps_or_rpcs else self.transform(recalc=True)
- if transform is None:
- dst_affine, dst_width, dst_height = _make_dst_affine(
- self._obj, self.crs, dst_crs, resolution, shape, **kwargs
- )
- else:
- dst_affine = transform
- if shape is not None:
- dst_height, dst_width = shape
- else:
- dst_height, dst_width = self.shape
-
- dst_data = self._create_dst_data(dst_height, dst_width)
-
- dst_nodata = self._get_dst_nodata(nodata)
-
- rasterio.warp.reproject(
- source=self._obj.values,
- destination=dst_data,
- src_transform=src_affine,
- src_crs=self.crs,
- src_nodata=self.nodata,
- dst_transform=dst_affine,
- dst_crs=dst_crs,
- dst_nodata=dst_nodata,
- resampling=resampling,
- **kwargs,
- )
- # add necessary attributes
- new_attrs = _generate_attrs(self._obj, dst_nodata)
- # make sure dimensions with coordinates renamed to x,y
- dst_dims: list[Hashable] = []
- for dim in self._obj.dims:
- if dim == self.x_dim:
- dst_dims.append("x")
- elif dim == self.y_dim:
- dst_dims.append("y")
- else:
- dst_dims.append(dim)
- xda = xarray.DataArray(
- name=self._obj.name,
- data=dst_data,
- coords=_make_coords(
- src_data_array=self._obj,
- dst_affine=dst_affine,
- dst_width=dst_width,
- dst_height=dst_height,
- force_generate=gcps_or_rpcs,
- ),
- dims=tuple(dst_dims),
- attrs=new_attrs,
- )
- xda.encoding = self._obj.encoding
- xda.rio.write_transform(dst_affine, inplace=True)
- xda.rio.write_crs(dst_crs, inplace=True)
- xda.rio.write_coordinate_system(inplace=True)
- return xda
-
- def _get_dst_nodata(self, nodata: Optional[float]) -> Optional[float]:
- default_nodata = (
- _NODATA_DTYPE_MAP.get(dtype_rev[self._obj.dtype.name])
- if self.nodata is None
- else self.nodata
- )
- dst_nodata = default_nodata if nodata is None else nodata
- return dst_nodata
-
- def _create_dst_data(self, dst_height: int, dst_width: int) -> numpy.ndarray:
- extra_dim = self._check_dimensions()
- if extra_dim:
- dst_data = numpy.zeros(
- (self._obj[extra_dim].size, dst_height, dst_width),
- dtype=self._obj.dtype.type,
- )
- else:
- dst_data = numpy.zeros((dst_height, dst_width), dtype=self._obj.dtype.type)
- return dst_data
-
-[docs] def reproject_match(
- self,
- match_data_array: Union[xarray.DataArray, xarray.Dataset],
- resampling: Resampling = Resampling.nearest,
- **reproject_kwargs,
- ) -> xarray.DataArray:
- """
- Reproject a DataArray object to match the resolution, projection,
- and region of another DataArray.
-
- Powered by :func:`rasterio.warp.reproject`
-
- .. note:: Only 2D/3D arrays with dimensions 'x'/'y' are currently supported.
- Requires either a grid mapping variable with 'spatial_ref' or
- a 'crs' attribute to be set containing a valid CRS.
- If using a WKT (e.g. from spatialreference.org), make sure it is an OGC WKT.
-
- .. versionadded:: 0.9 reproject_kwargs
-
- Parameters
- ----------
- match_data_array: :obj:`xarray.DataArray` | :obj:`xarray.Dataset`
- DataArray of the target resolution and projection.
- resampling: rasterio.enums.Resampling, optional
- See :func:`rasterio.warp.reproject` for more details.
- **reproject_kwargs:
- Other options to pass to :meth:`rioxarray.raster_array.RasterArray.reproject`
-
- Returns
- --------
- :obj:`xarray.DataArray`:
- Contains the data from the src_data_array, reprojected to match
- match_data_array.
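-
- Examples
- --------
- A minimal sketch (``raster`` and ``template`` are hypothetical DataArrays,
- each with a CRS and spatial dimensions set):
-
- >>> matched = raster.rio.reproject_match(template)
- >>> matched.rio.shape == template.rio.shape
- True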
- """
- reprojected_data_array = self.reproject(
- match_data_array.rio.crs,
- transform=match_data_array.rio.transform(recalc=True),
- shape=match_data_array.rio.shape,
- resampling=resampling,
- **reproject_kwargs,
- )
- # hack to resolve: https://github.com/corteva/rioxarray/issues/298
- # may be resolved in the future by flexible indexes:
- # https://github.com/pydata/xarray/pull/4489#issuecomment-831809607
- x_attrs = reprojected_data_array[reprojected_data_array.rio.x_dim].attrs.copy()
- y_attrs = reprojected_data_array[reprojected_data_array.rio.y_dim].attrs.copy()
- # ensure coords the same
- reprojected_data_array = reprojected_data_array.assign_coords(
- {
- reprojected_data_array.rio.x_dim: copy.copy(
- match_data_array[match_data_array.rio.x_dim].values
- ),
- reprojected_data_array.rio.y_dim: copy.copy(
- match_data_array[match_data_array.rio.y_dim].values
- ),
- }
- )
- # ensure attributes copied
- reprojected_data_array[reprojected_data_array.rio.x_dim].attrs = x_attrs
- reprojected_data_array[reprojected_data_array.rio.y_dim].attrs = y_attrs
- return reprojected_data_array
-
-[docs] def pad_xy(
- self,
- minx: float,
- miny: float,
- maxx: float,
- maxy: float,
- constant_values: Union[
- float, tuple[int, int], Mapping[Any, tuple[int, int]], None
- ] = None,
- ) -> xarray.DataArray:
- """Pad the array to x,y bounds.
-
- .. versionadded:: 0.0.29
-
- Parameters
- ----------
- minx: float
- Minimum bound for x coordinate.
- miny: float
- Minimum bound for y coordinate.
- maxx: float
- Maximum bound for x coordinate.
- maxy: float
- Maximum bound for y coordinate.
- constant_values: scalar, tuple or mapping of hashable to tuple
- The value used for padding. If None, nodata will be used if it is
- set, and numpy.nan otherwise.
-
-
- Returns
- -------
- :obj:`xarray.DataArray`:
- The padded object.
- """
- # pylint: disable=too-many-locals
- left, bottom, right, top = self._internal_bounds()
- resolution_x, resolution_y = self.resolution()
- y_before = y_after = 0
- x_before = x_after = 0
- y_coord: Union[xarray.DataArray, numpy.ndarray] = self._obj[self.y_dim]
- x_coord: Union[xarray.DataArray, numpy.ndarray] = self._obj[self.x_dim]
-
- if top - resolution_y < maxy:
- new_y_coord: numpy.ndarray = numpy.arange(bottom, maxy, -resolution_y)[::-1]
- y_before = len(new_y_coord) - len(y_coord)
- y_coord = new_y_coord
- top = y_coord[0]
- if bottom + resolution_y > miny:
- new_y_coord = numpy.arange(top, miny, resolution_y)
- y_after = len(new_y_coord) - len(y_coord)
- y_coord = new_y_coord
- bottom = y_coord[-1]
-
- if left - resolution_x > minx:
- new_x_coord: numpy.ndarray = numpy.arange(right, minx, -resolution_x)[::-1]
- x_before = len(new_x_coord) - len(x_coord)
- x_coord = new_x_coord
- left = x_coord[0]
- if right + resolution_x < maxx:
- new_x_coord = numpy.arange(left, maxx, resolution_x)
- x_after = len(new_x_coord) - len(x_coord)
- x_coord = new_x_coord
- right = x_coord[-1]
-
- if constant_values is None:
- constant_values = numpy.nan if self.nodata is None else self.nodata
-
- superset = self._obj.pad(
- pad_width={
- self.x_dim: (x_before, x_after),
- self.y_dim: (y_before, y_after),
- },
- constant_values=constant_values, # type: ignore
- ).rio.set_spatial_dims(x_dim=self.x_dim, y_dim=self.y_dim, inplace=True)
- superset[self.x_dim] = x_coord
- superset[self.y_dim] = y_coord
- superset.rio.write_transform(inplace=True)
- return superset
-
-[docs] def pad_box(
- self,
- minx: float,
- miny: float,
- maxx: float,
- maxy: float,
- constant_values: Union[
- float, tuple[int, int], Mapping[Any, tuple[int, int]], None
- ] = None,
- ) -> xarray.DataArray:
- """Pad the :obj:`xarray.DataArray` to a bounding box
-
- .. versionadded:: 0.0.29
-
- Parameters
- ----------
- minx: float
- Minimum bound for x coordinate.
- miny: float
- Minimum bound for y coordinate.
- maxx: float
- Maximum bound for x coordinate.
- maxy: float
- Maximum bound for y coordinate.
- constant_values: scalar, tuple or mapping of hashable to tuple
- The value used for padding. If None, nodata will be used if it is
- set, and numpy.nan otherwise.
-
-
- Returns
- -------
- :obj:`xarray.DataArray`:
- The padded object.
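-
- Examples
- --------
- A minimal sketch (``raster`` is a hypothetical DataArray; the bounds are
- illustrative values in the raster's CRS units):
-
- >>> padded = raster.rio.pad_box(minx=-120.0, miny=30.0, maxx=-100.0, maxy=50.0)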
- """
- resolution_x, resolution_y = self.resolution()
-
- pad_minx = minx - abs(resolution_x) / 2.0
- pad_miny = miny - abs(resolution_y) / 2.0
- pad_maxx = maxx + abs(resolution_x) / 2.0
- pad_maxy = maxy + abs(resolution_y) / 2.0
-
- pd_array = self.pad_xy(pad_minx, pad_miny, pad_maxx, pad_maxy, constant_values)
-
- # make sure correct attributes preserved & projection added
- _add_attrs_proj(pd_array, self._obj)
-
- return pd_array
-
-[docs] def clip_box(
- self,
- minx: float,
- miny: float,
- maxx: float,
- maxy: float,
- auto_expand: Union[bool, int] = False,
- auto_expand_limit: int = 3,
- crs: Optional[Any] = None,
- ) -> xarray.DataArray:
- """Clip the :obj:`xarray.DataArray` by a bounding box.
-
- .. versionadded:: 0.12 crs
-
- Parameters
- ----------
- minx: float
- Minimum bound for x coordinate.
- miny: float
- Minimum bound for y coordinate.
- maxx: float
- Maximum bound for x coordinate.
- maxy: float
- Maximum bound for y coordinate.
- auto_expand: Union[bool, int]
- If True, it will expand the clip search if only a 1D raster is found within the clip bounds.
- auto_expand_limit: int
- maximum number of times the clip will be retried before raising
- an exception.
- crs: :obj:`rasterio.crs.CRS`, optional
- The CRS of the bounding box. Default is to assume it is the same
- as the dataset.
-
- Returns
- -------
- xarray.DataArray:
- The clipped object.
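-
- Examples
- --------
- A minimal sketch (``raster`` is a hypothetical DataArray; the bounds are
- illustrative longitude/latitude values passed with an explicit CRS):
-
- >>> clipped = raster.rio.clip_box(
- ...     minx=-94.08, miny=41.68, maxx=-94.06, maxy=41.70, crs="EPSG:4326"
- ... )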
- """
- if self.width == 1 or self.height == 1:
- raise OneDimensionalRaster(
- "At least one of the raster x,y coordinates has only one point."
- f"{_get_data_var_message(self._obj)}"
- )
-
- if crs is not None and self.crs is None:
- raise MissingCRS(
- "CRS not found. Please set the CRS with 'rio.write_crs()'."
- f"{_get_data_var_message(self._obj)}"
- )
-
- crs = crs_from_user_input(crs) if crs is not None else self.crs
- if self.crs != crs:
- minx, miny, maxx, maxy = rasterio.warp.transform_bounds(
- src_crs=crs,
- dst_crs=self.crs,
- left=minx,
- bottom=miny,
- right=maxx,
- top=maxy,
- )
- if (
- self.crs is not None
- and self.crs.is_geographic # pylint: disable=no-member
- and minx > maxx
- ):
- raise RioXarrayError(
- "Transformed bounds crossed the antimeridian. "
- "Please transform your bounds manually using "
- "rasterio.warp.transform_bounds and clip using "
- "the bounding box(es) desired."
- )
-
- resolution_x, resolution_y = self.resolution()
- # make sure that if the coordinates are
- # in reverse order that it still works
- left, bottom, right, top = _order_bounds(
- minx=minx,
- miny=miny,
- maxx=maxx,
- maxy=maxy,
- resolution_x=resolution_x,
- resolution_y=resolution_y,
- )
-
- # pull the data out
- window_error = None
- try:
- window = rasterio.windows.from_bounds(
- left=numpy.array(left).item(),
- bottom=numpy.array(bottom).item(),
- right=numpy.array(right).item(),
- top=numpy.array(top).item(),
- transform=self.transform(recalc=True),
- )
- cl_array: xarray.DataArray = self.isel_window(window) # type: ignore
- except rasterio.errors.WindowError as err:
- window_error = err
-
- # check that the window has data in it
- if window_error or cl_array.rio.width <= 1 or cl_array.rio.height <= 1:
- if auto_expand and auto_expand < auto_expand_limit:
- return self.clip_box(
- minx=minx - abs(resolution_x) / 2.0,
- miny=miny - abs(resolution_y) / 2.0,
- maxx=maxx + abs(resolution_x) / 2.0,
- maxy=maxy + abs(resolution_y) / 2.0,
- auto_expand=int(auto_expand) + 1,
- auto_expand_limit=auto_expand_limit,
- )
- if window_error:
- raise window_error
- if cl_array.rio.width < 1 or cl_array.rio.height < 1:
- raise NoDataInBounds(
- f"No data found in bounds.{_get_data_var_message(self._obj)}"
- )
- if cl_array.rio.width == 1 or cl_array.rio.height == 1:
- raise OneDimensionalRaster(
- "At least one of the clipped raster x,y coordinates"
- " has only one point."
- f"{_get_data_var_message(self._obj)}"
- )
-
- # make sure correct attributes preserved & projection added
- _add_attrs_proj(cl_array, self._obj)
- return cl_array
-
-[docs] def clip(
- self,
- geometries: Iterable,
- crs: Optional[Any] = None,
- all_touched: bool = False,
- drop: bool = True,
- invert: bool = False,
- from_disk: bool = False,
- ) -> xarray.DataArray:
- """
- Crops a :obj:`xarray.DataArray` by geojson like geometry dicts.
-
- Powered by `rasterio.features.geometry_mask`.
-
- Examples:
-
- >>> geometry = ''' {"type": "Polygon",
- ... "coordinates": [
- ... [[-94.07955380199459, 41.69085871273774],
- ... [-94.06082436942204, 41.69103313774798],
- ... [-94.06063203899649, 41.67932439500822],
- ... [-94.07935807746362, 41.679150041277325],
- ... [-94.07955380199459, 41.69085871273774]]]}'''
- >>> cropping_geometries = [geojson.loads(geometry)]
- >>> xds = xarray.open_rasterio('cool_raster.tif')
- >>> cropped = xds.rio.clip(geometries=cropping_geometries, crs=4326)
-
-
- .. versionadded:: 0.2 from_disk
-
- Parameters
- ----------
- geometries: Iterable
- A list of geojson geometry dicts, or objects with __geo_interface__
- (requires rasterio 1.2+).
- crs: :obj:`rasterio.crs.CRS`, optional
- The CRS of the input geometries. Default is to assume it is the same
- as the dataset.
- all_touched : bool, optional
- If True, all pixels touched by geometries will be burned in. If
- False, only pixels whose center is within the polygon or that
- are selected by Bresenham's line algorithm will be burned in.
- drop: bool, optional
- If True, drop the data outside of the extent of the mask geometries.
- Otherwise, it will return the same raster with the data masked.
- Default is True.
- invert: boolean, optional
- If False, pixels that do not overlap shapes will be set as nodata.
- Otherwise, pixels that overlap the shapes will be set as nodata.
- False by default.
- from_disk: boolean, optional
- If True, it will clip from disk using rasterio.mask.mask if possible.
- This is beneficial when the size of the data is larger than memory.
- Default is False.
-
- Returns
- -------
- :obj:`xarray.DataArray`:
- The clipped object.
- """
- if self.crs is None:
- raise MissingCRS(
- "CRS not found. Please set the CRS with 'rio.write_crs()'."
- f"{_get_data_var_message(self._obj)}"
- )
- crs = crs_from_user_input(crs) if crs is not None else self.crs
- if self.crs != crs:
- geometries = rasterio.warp.transform_geom(crs, self.crs, geometries)
- cropped_ds = None
- if from_disk:
- cropped_ds = _clip_from_disk(
- self._obj,
- geometries=geometries,
- all_touched=all_touched,
- drop=drop,
- invert=invert,
- )
- if cropped_ds is None:
- cropped_ds = _clip_xarray(
- self._obj,
- geometries=geometries,
- all_touched=all_touched,
- drop=drop,
- invert=invert,
- )
-
- if (
- cropped_ds.coords[self.x_dim].size < 1
- or cropped_ds.coords[self.y_dim].size < 1
- ):
- raise NoDataInBounds(
- f"No data found in bounds.{_get_data_var_message(self._obj)}"
- )
-
- # make sure correct attributes preserved & projection added
- _add_attrs_proj(cropped_ds, self._obj)
-
- return cropped_ds
-
- def _interpolate_na(
- self, src_data: Any, method: Literal["linear", "nearest", "cubic"] = "nearest"
- ) -> numpy.ndarray:
- """
- This method uses scipy.interpolate.griddata to interpolate missing data.
-
- Parameters
- ----------
- src_data: Any
- Input data array.
- method: {'linear', 'nearest', 'cubic'}, optional
- The method to use for interpolation in `scipy.interpolate.griddata`.
-
- Returns
- -------
- :class:`numpy.ndarray`:
- An interpolated :class:`numpy.ndarray`.
- """
- try:
- from scipy.interpolate import ( # pylint: disable=import-outside-toplevel,import-error
- griddata,
- )
- except ModuleNotFoundError as err:
- raise ModuleNotFoundError(
- "scipy is not found. Use rioxarray[interp] to install."
- ) from err
-
- src_data_flat = src_data.flatten()
- try:
- data_isnan = numpy.isnan(self.nodata) # type: ignore
- except TypeError:
- data_isnan = False
- if not data_isnan:
- data_bool = src_data_flat != self.nodata
- else:
- data_bool = ~numpy.isnan(src_data_flat)
-
- if not data_bool.any():
- return src_data
-
- x_coords, y_coords = numpy.meshgrid(
- self._obj.coords[self.x_dim].values, self._obj.coords[self.y_dim].values
- )
-
- return griddata(
- points=(x_coords.flatten()[data_bool], y_coords.flatten()[data_bool]),
- values=src_data_flat[data_bool],
- xi=(x_coords, y_coords),
- method=method,
- fill_value=self.nodata,
- )
-
-[docs] def interpolate_na(
- self, method: Literal["linear", "nearest", "cubic"] = "nearest"
- ) -> xarray.DataArray:
- """
- This method uses scipy.interpolate.griddata to interpolate missing data.
-
- .. warning:: scipy is an optional dependency.
-
- Parameters
- ----------
- method: {'linear', 'nearest', 'cubic'}, optional
- The method to use for interpolation in `scipy.interpolate.griddata`.
-
- Returns
- -------
- :obj:`xarray.DataArray`:
- An interpolated :obj:`xarray.DataArray` object.
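-
- Examples
- --------
- A minimal sketch (``raster`` is a hypothetical DataArray with a nodata value
- set; requires the optional scipy dependency):
-
- >>> filled = raster.rio.interpolate_na(method="nearest")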
- """
- if self.nodata is None:
- raise RioXarrayError(
- "nodata not found. Please set the nodata with 'rio.write_nodata()'."
- f"{_get_data_var_message(self._obj)}"
- )
-
- extra_dim = self._check_dimensions()
- if extra_dim:
- interp_data = []
- for _, sub_xds in self._obj.groupby(extra_dim):
- interp_data.append(
- self._interpolate_na(sub_xds.load().data, method=method)
- )
- interp_data = numpy.array(interp_data) # type: ignore
- else:
- interp_data = self._interpolate_na(self._obj.load().data, method=method) # type: ignore
-
- interp_array = xarray.DataArray(
- name=self._obj.name,
- data=interp_data,
- coords=self._obj.coords,
- dims=self._obj.dims,
- attrs=self._obj.attrs,
- )
- interp_array.encoding = self._obj.encoding
-
- # make sure correct attributes preserved & projection added
- _add_attrs_proj(interp_array, self._obj)
-
- return interp_array
-
-[docs] def to_raster(
- self,
- raster_path: Union[str, os.PathLike],
- driver: Optional[str] = None,
- dtype: Optional[Union[str, numpy.dtype]] = None,
- tags: Optional[dict[str, str]] = None,
- windowed: bool = False,
- recalc_transform: bool = True,
- lock: Optional[bool] = None,
- compute: bool = True,
- **profile_kwargs,
- ) -> None:
- """
- Export the DataArray to a raster file.
-
- .. versionadded:: 0.2 lock
-
- Parameters
- ----------
- raster_path: Union[str, os.PathLike]
- The path to output the raster to.
- driver: str, optional
- The name of the GDAL/rasterio driver to use to export the raster.
- Default is "GTiff" if rasterio < 1.2 otherwise it will autodetect.
- dtype: str, optional
- The data type to write the raster to. Default is the DataArray's dtype.
- tags: dict, optional
- A dictionary of tags to write to the raster.
- windowed: bool, optional
- If True, it will write using the windows of the output raster.
- This is useful for loading data in chunks when writing. Does not
- do anything when writing with dask.
- Default is False.
- recalc_transform: bool, optional
- If False, it will write the raster with the cached transform from
- the dataarray rather than recalculating it.
- Default is True.
- lock: boolean or Lock, optional
- Lock to use to write data using dask.
- If not supplied, it will use a single process for writing.
- compute: bool, optional
- If True and data is a dask array, then compute and save
- the data immediately. If False, return a dask Delayed object.
- Call ".compute()" on the Delayed object to compute the result
- later. Call ``dask.compute(delayed1, delayed2)`` to save
- multiple delayed files at once. Default is True.
- **profile_kwargs
- Additional keyword arguments to pass into writing the raster. The
- nodata, transform, crs, count, width, and height attributes
- are ignored.
-
- Returns
- -------
- :obj:`dask.Delayed`:
- If the data array is a dask array and compute
- is True. Otherwise None is returned.
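-
- Examples
- --------
- A minimal sketch (``raster`` is a hypothetical DataArray; ``compress`` is an
- illustrative GDAL creation option passed through ``profile_kwargs``):
-
- >>> raster.rio.to_raster("output.tif", compress="LZW")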
-
- """
- if driver is None:
- extension = Path(raster_path).suffix
- # https://github.com/rasterio/rasterio/pull/2008
- if extension in (".tif", ".tiff"):
- driver = "GTiff"
-
- # get the output profile from the rasterio object
- # if opened with xarray.open_rasterio()
- try:
- out_profile = self._manager.acquire().profile # type: ignore
- except AttributeError:
- out_profile = {}
- out_profile.update(profile_kwargs)
-
- # filter out the generated attributes
- out_profile = {
- key: value
- for key, value in out_profile.items()
- if key
- not in (
- "driver",
- "height",
- "width",
- "crs",
- "transform",
- "nodata",
- "count",
- "dtype",
- )
- }
- rio_nodata = (
- self.encoded_nodata if self.encoded_nodata is not None else self.nodata
- )
-
- return RasterioWriter(raster_path=raster_path).to_raster(
- xarray_dataarray=self._obj,
- tags=tags,
- driver=driver,
- height=int(self.height),
- width=int(self.width),
- count=int(self.count),
- dtype=dtype,
- crs=self.crs,
- transform=self.transform(recalc=recalc_transform),
- gcps=self.get_gcps(),
- nodata=rio_nodata,
- windowed=windowed,
- lock=lock,
- compute=compute,
- **out_profile,
- )
-
-"""
-This module is an extension for xarray to provide rasterio capabilities
-to xarray datasets.
-"""
-import os
-from collections.abc import Iterable, Mapping
-from typing import Any, Literal, Optional, Union
-from uuid import uuid4
-
-import numpy
-import rasterio.crs
-import xarray
-from affine import Affine
-from rasterio.enums import Resampling
-
-from rioxarray._options import SKIP_MISSING_SPATIAL_DIMS, get_option
-from rioxarray.exceptions import MissingSpatialDimensionError, RioXarrayError
-from rioxarray.rioxarray import XRasterBase, _get_spatial_dims
-
-
-[docs]@xarray.register_dataset_accessor("rio")
-class RasterDataset(XRasterBase):
- """This is the GIS extension for :class:`xarray.Dataset`"""
-
- @property
- def vars(self) -> list:
- """list: Returns non-coordinate varibles"""
- return list(self._obj.data_vars)
-
- @property
- def crs(self) -> Optional[rasterio.crs.CRS]:
- """:obj:`rasterio.crs.CRS`:
- Retrieve projection from `xarray.Dataset`
- """
- if self._crs is not None:
- return None if self._crs is False else self._crs
- self._crs = super().crs
- if self._crs is not None:
- return self._crs
- # ensure all the CRS of the variables are the same
- crs_list = []
- for var in self.vars:
- if self._obj[var].rio.crs is not None:
- crs_list.append(self._obj[var].rio.crs)
- try:
- crs = crs_list[0]
- except IndexError:
- crs = None
- if crs is None:
- self._crs = False
- return None
- if all(crs_i == crs for crs_i in crs_list):
- self._crs = crs
- else:
- raise RioXarrayError(f"CRS in DataArrays differ in the Dataset: {crs_list}")
- return self._crs
-
-[docs] def reproject(
- self,
- dst_crs: Any,
- resolution: Optional[Union[float, tuple[float, float]]] = None,
- shape: Optional[tuple[int, int]] = None,
- transform: Optional[Affine] = None,
- resampling: Resampling = Resampling.nearest,
- nodata: Optional[float] = None,
- **kwargs,
- ) -> xarray.Dataset:
- """
- Reproject :class:`xarray.Dataset` objects
-
- .. note:: Only 2D/3D arrays with dimensions 'x'/'y' are currently supported.
- Others are appended as is.
- Requires either a grid mapping variable with 'spatial_ref' or
- a 'crs' attribute to be set containing a valid CRS.
- If using a WKT (e.g. from spatialreference.org), make sure it is an OGC WKT.
-
- .. note:: To re-project with dask, see
- `odc-geo <https://odc-geo.readthedocs.io/>`__ &
- `pyresample <https://pyresample.readthedocs.io/>`__.
-
- .. versionadded:: 0.0.27 shape
- .. versionadded:: 0.0.28 transform
- .. versionadded:: 0.5.0 nodata, kwargs
-
- Parameters
- ----------
- dst_crs: str
- OGC WKT string or Proj.4 string.
- resolution: float or tuple(float, float), optional
- Size of a destination pixel in destination projection units
- (e.g. degrees or metres).
- shape: tuple(int, int), optional
- Shape of the destination in pixels (dst_height, dst_width). Cannot be used
- together with resolution.
- transform: Affine, optional
- The destination transform.
- resampling: rasterio.enums.Resampling, optional
- See :func:`rasterio.warp.reproject` for more details.
- nodata: float, optional
- The nodata value used to initialize the destination;
- it will remain in all areas not covered by the reprojected source.
- If not provided, defaults to the nodata value of the source image
- if it exists; otherwise, an appropriate value is chosen based on dtype.
- **kwargs: dict
- Additional keyword arguments to pass into :func:`rasterio.warp.reproject`.
- To override:
- - src_transform: `rio.write_transform`
- - src_crs: `rio.write_crs`
- - src_nodata: `rio.write_nodata`
-
- Returns
- --------
- :class:`xarray.Dataset`:
- The reprojected Dataset.
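-
- Examples
- --------
- A minimal sketch (``xds`` is a hypothetical xarray.Dataset whose variables
- have a CRS and 'x'/'y' dimensions):
-
- >>> reprojected = xds.rio.reproject("EPSG:3857")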
- """
- resampled_dataset = xarray.Dataset(attrs=self._obj.attrs)
- for var in self.vars:
- try:
- x_dim, y_dim = _get_spatial_dims(self._obj, var)
- resampled_dataset[var] = (
- self._obj[var]
- .rio.set_spatial_dims(x_dim=x_dim, y_dim=y_dim, inplace=True)
- .rio.reproject(
- dst_crs,
- resolution=resolution,
- shape=shape,
- transform=transform,
- resampling=resampling,
- nodata=nodata,
- **kwargs,
- )
- )
- except MissingSpatialDimensionError:
- if len(self._obj[var].dims) >= 2 and not get_option(
- SKIP_MISSING_SPATIAL_DIMS
- ):
- raise
- resampled_dataset[var] = self._obj[var].copy()
- return resampled_dataset
-
-[docs] def reproject_match(
- self,
- match_data_array: Union[xarray.DataArray, xarray.Dataset],
- resampling: Resampling = Resampling.nearest,
- **reproject_kwargs,
- ) -> xarray.Dataset:
- """
- Reproject a Dataset object to match the resolution, projection,
- and region of another DataArray or Dataset.
-
- .. note:: Only 2D/3D arrays with dimensions 'x'/'y' are currently supported.
- Others are appended as is.
- Requires either a grid mapping variable with 'spatial_ref' or
- a 'crs' attribute to be set containing a valid CRS.
- If using a WKT (e.g. from spatialreference.org), make sure it is an OGC WKT.
-
- .. versionadded:: 0.9 reproject_kwargs
-
- Parameters
- ----------
- match_data_array: :obj:`xarray.DataArray` | :obj:`xarray.Dataset`
- Dataset with the target resolution and projection.
- resampling: rasterio.enums.Resampling, optional
- See :func:`rasterio.warp.reproject` for more details.
- **reproject_kwargs:
- Other options to pass to :meth:`rioxarray.raster_dataset.RasterDataset.reproject`
-
- Returns
- --------
- :obj:`xarray.Dataset`:
- Contains the data from the src_data_array,
- reprojected to match match_data_array.
- """
- resampled_dataset = xarray.Dataset(attrs=self._obj.attrs)
- for var in self.vars:
- try:
- x_dim, y_dim = _get_spatial_dims(self._obj, var)
- resampled_dataset[var] = (
- self._obj[var]
- .rio.set_spatial_dims(x_dim=x_dim, y_dim=y_dim, inplace=True)
- .rio.reproject_match(
- match_data_array, resampling=resampling, **reproject_kwargs
- )
- )
- except MissingSpatialDimensionError:
- if len(self._obj[var].dims) >= 2 and not get_option(
- SKIP_MISSING_SPATIAL_DIMS
- ):
- raise
- resampled_dataset[var] = self._obj[var].copy()
- return resampled_dataset.rio.set_spatial_dims(
- x_dim=self.x_dim, y_dim=self.y_dim, inplace=True
- )
-
-[docs] def pad_box(
- self,
- minx: float,
- miny: float,
- maxx: float,
- maxy: float,
- constant_values: Union[
- float, tuple[int, int], Mapping[Any, tuple[int, int]], None
- ] = None,
- ) -> xarray.Dataset:
- """Pad the :class:`xarray.Dataset` to a bounding box.
-
- .. warning:: Only works if all variables in the dataset have the
- same coordinates.
-
- .. warning:: Pads variables that have dimensions 'x'/'y'. Others are appended as is.
-
- Parameters
- ----------
- minx: float
- Minimum bound for x coordinate.
- miny: float
- Minimum bound for y coordinate.
- maxx: float
- Maximum bound for x coordinate.
- maxy: float
- Maximum bound for y coordinate.
- constant_values: scalar, tuple or mapping of hashable to tuple
- The value used for padding. If None, nodata will be used if it is
- set, and numpy.nan otherwise.
-
- Returns
- -------
- :obj:`xarray.Dataset`:
- The padded object.
- """
- padded_dataset = xarray.Dataset(attrs=self._obj.attrs)
- for var in self.vars:
- try:
- x_dim, y_dim = _get_spatial_dims(self._obj, var)
- padded_dataset[var] = (
- self._obj[var]
- .rio.set_spatial_dims(x_dim=x_dim, y_dim=y_dim, inplace=True)
- .rio.pad_box(
- minx, miny, maxx, maxy, constant_values=constant_values
- )
- )
- except MissingSpatialDimensionError:
- if len(self._obj[var].dims) >= 2 and not get_option(
- SKIP_MISSING_SPATIAL_DIMS
- ):
- raise
- padded_dataset[var] = self._obj[var].copy()
- return padded_dataset.rio.set_spatial_dims(
- x_dim=self.x_dim, y_dim=self.y_dim, inplace=True
- )
-
-[docs] def clip_box(
- self,
- minx: float,
- miny: float,
- maxx: float,
- maxy: float,
- auto_expand: Union[bool, int] = False,
- auto_expand_limit: int = 3,
- crs: Optional[Any] = None,
- ) -> xarray.Dataset:
- """Clip the :class:`xarray.Dataset` by a bounding box in dimensions 'x'/'y'.
-
- .. warning:: Clips variables that have dimensions 'x'/'y'. Others are appended as is.
-
- .. versionadded:: 0.12 crs
-
- Parameters
- ----------
- minx: float
- Minimum bound for x coordinate.
- miny: float
- Minimum bound for y coordinate.
- maxx: float
- Maximum bound for x coordinate.
- maxy: float
- Maximum bound for y coordinate.
- auto_expand: bool
- If True, it will expand the clip search if only a 1D raster is found within the clip bounds.
- auto_expand_limit: int
- maximum number of times the clip will be retried before raising
- an exception.
- crs: :obj:`rasterio.crs.CRS`, optional
- The CRS of the bounding box. Default is to assume it is the same
- as the dataset.
-
- Returns
- -------
- Dataset:
- The clipped object.
- """
- clipped_dataset = xarray.Dataset(attrs=self._obj.attrs)
- for var in self.vars:
- try:
- x_dim, y_dim = _get_spatial_dims(self._obj, var)
- clipped_dataset[var] = (
- self._obj[var]
- .rio.set_spatial_dims(x_dim=x_dim, y_dim=y_dim, inplace=True)
- .rio.clip_box(
- minx,
- miny,
- maxx,
- maxy,
- auto_expand=auto_expand,
- auto_expand_limit=auto_expand_limit,
- crs=crs,
- )
- )
- except MissingSpatialDimensionError:
- if len(self._obj[var].dims) >= 2 and not get_option(
- SKIP_MISSING_SPATIAL_DIMS
- ):
- raise
- clipped_dataset[var] = self._obj[var].copy()
- return clipped_dataset.rio.set_spatial_dims(
- x_dim=self.x_dim, y_dim=self.y_dim, inplace=True
- )
-
-[docs] def clip(
- self,
- geometries: Iterable,
- crs: Optional[Any] = None,
- all_touched: bool = False,
- drop: bool = True,
- invert: bool = False,
- from_disk: bool = False,
- ) -> xarray.Dataset:
- """
- Crops a :class:`xarray.Dataset` by geojson like geometry dicts in dimensions 'x'/'y'.
-
- .. warning:: Clips variables that have dimensions 'x'/'y'. Others are appended as is.
-
- Powered by `rasterio.features.geometry_mask`.
-
- Examples:
-
- >>> geometry = ''' {"type": "Polygon",
- ... "coordinates": [
- ... [[-94.07955380199459, 41.69085871273774],
- ... [-94.06082436942204, 41.69103313774798],
- ... [-94.06063203899649, 41.67932439500822],
- ... [-94.07935807746362, 41.679150041277325],
- ... [-94.07955380199459, 41.69085871273774]]]}'''
- >>> cropping_geometries = [geojson.loads(geometry)]
- >>> xds = xarray.open_rasterio('cool_raster.tif')
- >>> cropped = xds.rio.clip(geometries=cropping_geometries, crs=4326)
-
-
- .. versionadded:: 0.2 from_disk
-
- Parameters
- ----------
- geometries: list
- A list of geojson geometry dicts.
- crs: :obj:`rasterio.crs.CRS`, optional
- The CRS of the input geometries. Default is to assume it is the same
- as the dataset.
- all_touched : boolean, optional
- If True, all pixels touched by geometries will be burned in. If
- False, only pixels whose center is within the polygon or that
- are selected by Bresenham's line algorithm will be burned in.
- drop: bool, optional
- If True, drop the data outside of the extent of the mask geometries.
- Otherwise, it will return the same raster with the data masked.
- Default is True.
- invert: boolean, optional
- If False, pixels that do not overlap shapes will be set as nodata.
- Otherwise, pixels that overlap the shapes will be set as nodata.
- False by default.
- from_disk: boolean, optional
- If True, it will clip from disk using rasterio.mask.mask if possible.
- This is beneficial when the size of the data is larger than memory.
- Default is False.
-
- Returns
- -------
- :obj:`xarray.Dataset`:
- The clipped object.
- """
- clipped_dataset = xarray.Dataset(attrs=self._obj.attrs)
- for var in self.vars:
- try:
- x_dim, y_dim = _get_spatial_dims(self._obj, var)
- clipped_dataset[var] = (
- self._obj[var]
- .rio.set_spatial_dims(x_dim=x_dim, y_dim=y_dim, inplace=True)
- .rio.clip(
- geometries,
- crs=crs,
- all_touched=all_touched,
- drop=drop,
- invert=invert,
- from_disk=from_disk,
- )
- )
- except MissingSpatialDimensionError:
- if len(self._obj[var].dims) >= 2 and not get_option(
- SKIP_MISSING_SPATIAL_DIMS
- ):
- raise
- clipped_dataset[var] = self._obj[var].copy()
- return clipped_dataset.rio.set_spatial_dims(
- x_dim=self.x_dim, y_dim=self.y_dim, inplace=True
- )
-
-[docs] def interpolate_na(
- self, method: Literal["linear", "nearest", "cubic"] = "nearest"
- ) -> xarray.Dataset:
- """
- This method uses `scipy.interpolate.griddata` to interpolate missing data.
-
- .. warning:: scipy is an optional dependency.
-
- .. warning:: Interpolates variables that have dimensions 'x'/'y'. Others are appended as is.
-
- Parameters
- ----------
- method: {'linear', 'nearest', 'cubic'}, optional
- The method to use for interpolation in `scipy.interpolate.griddata`.
-
- Returns
- -------
- :obj:`xarray.Dataset`:
- The interpolated object.
- """
- interpolated_dataset = xarray.Dataset(attrs=self._obj.attrs)
- for var in self.vars:
- try:
- x_dim, y_dim = _get_spatial_dims(self._obj, var)
- interpolated_dataset[var] = (
- self._obj[var]
- .rio.set_spatial_dims(x_dim=x_dim, y_dim=y_dim, inplace=True)
- .rio.interpolate_na(method=method)
- )
- except MissingSpatialDimensionError:
- if len(self._obj[var].dims) >= 2 and not get_option(
- SKIP_MISSING_SPATIAL_DIMS
- ):
- raise
- interpolated_dataset[var] = self._obj[var].copy()
- return interpolated_dataset.rio.set_spatial_dims(
- x_dim=self.x_dim, y_dim=self.y_dim, inplace=True
- )
-
-[docs] def to_raster(
- self,
- raster_path: Union[str, os.PathLike],
- driver: Optional[str] = None,
- dtype: Optional[Union[str, numpy.dtype]] = None,
- tags: Optional[dict[str, str]] = None,
- windowed: bool = False,
- recalc_transform: bool = True,
- lock: Optional[bool] = None,
- compute: bool = True,
- **profile_kwargs,
- ) -> None:
- """
- Export the Dataset to a raster file. Only works with 2D data.
-
- .. versionadded:: 0.2 lock
-
- Parameters
- ----------
- raster_path: Union[str, os.PathLike]
- The path to output the raster to.
- driver: str, optional
- The name of the GDAL/rasterio driver to use to export the raster.
- Default is "GTiff" if rasterio < 1.2 otherwise it will autodetect.
- dtype: str, optional
- The data type to write the raster to. Default is the Dataset's dtype.
- tags: dict, optional
- A dictionary of tags to write to the raster.
- windowed: bool, optional
- If True, it will write using the windows of the output raster.
- This is useful for loading data in chunks when writing. Does not
- do anything when writing with dask.
- Default is False.
- recalc_transform: bool, optional
- If False, it will write the raster with the cached transform from
- the dataset rather than recalculating it.
- Default is True.
- lock: boolean or Lock, optional
- Lock to use to write data using dask.
- If not supplied, it will use a single process for writing.
- compute: bool, optional
- If True and data is a dask array, then compute and save
- the data immediately. If False, return a dask Delayed object.
- Call ".compute()" on the Delayed object to compute the result
- later. Call ``dask.compute(delayed1, delayed2)`` to save
- multiple delayed files at once. Default is True.
- **profile_kwargs
- Additional keyword arguments to pass into writing the raster. The
- nodata, transform, crs, count, width, and height attributes
- are ignored.
-
- Returns
- -------
- :obj:`dask.Delayed`:
- If the data array is a dask array and compute
- is True. Otherwise None is returned.
-
- """
- variable_dim = f"band_{uuid4()}"
- data_array = self._obj.to_array(dim=variable_dim)
- # ensure raster metadata preserved
- scales = []
- offsets = []
- nodatavals = []
- band_tags = []
- long_name = []
- for data_var in data_array[variable_dim].values:
- scales.append(self._obj[data_var].attrs.get("scale_factor", 1.0))
- offsets.append(self._obj[data_var].attrs.get("add_offset", 0.0))
- long_name.append(self._obj[data_var].attrs.get("long_name", data_var))
- nodatavals.append(self._obj[data_var].rio.nodata)
- band_tags.append(self._obj[data_var].attrs.copy())
- data_array.attrs["scales"] = scales
- data_array.attrs["offsets"] = offsets
- data_array.attrs["band_tags"] = band_tags
- data_array.attrs["long_name"] = long_name
-
- nodata = nodatavals[0]
- if (
- all(nodataval == nodata for nodataval in nodatavals)
- or numpy.isnan(nodatavals).all()
- ):
- data_array.rio.write_nodata(nodata, inplace=True)
- else:
- raise RioXarrayError(
- "All nodata values must be the same when exporting to raster. "
- f"Current values: {nodatavals}"
- )
- if self.crs is not None:
- data_array.rio.write_crs(self.crs, inplace=True)
- # write it to a raster
- return data_array.rio.set_spatial_dims(
- x_dim=self.x_dim,
- y_dim=self.y_dim,
- inplace=True,
- ).rio.to_raster(
- raster_path=raster_path,
- driver=driver,
- dtype=dtype,
- tags=tags,
- windowed=windowed,
- recalc_transform=recalc_transform,
- lock=lock,
- compute=compute,
- **profile_kwargs,
- )
-
-"""
-This module is an extension for xarray to provide rasterio capabilities
-to xarray datasets/dataarrays.
-"""
-# pylint: disable=too-many-lines
-import math
-import warnings
-from collections.abc import Hashable, Iterable
-from typing import Any, Literal, Optional, Union
-
-import numpy
-import pyproj
-import rasterio.warp
-import rasterio.windows
-import xarray
-from affine import Affine
-from rasterio.control import GroundControlPoint
-from rasterio.crs import CRS
-
-from rioxarray._options import EXPORT_GRID_MAPPING, get_option
-from rioxarray.crs import crs_from_user_input
-from rioxarray.exceptions import (
- DimensionError,
- DimensionMissingCoordinateError,
- InvalidDimensionOrder,
- MissingCRS,
- MissingSpatialDimensionError,
- NoDataInBounds,
- OneDimensionalRaster,
- RioXarrayError,
- TooManyDimensions,
-)
-
-DEFAULT_GRID_MAP = "spatial_ref"
-
-
-def _affine_has_rotation(affine: Affine) -> bool:
- """
- Determine if the affine has rotation.
-
- Parameters
- ----------
- affine: :obj:`affine.Affine`
- The affine of the grid.
-
- Returns
- -------
- bool
- """
- return affine.b == affine.d != 0
-
-
-def _resolution(affine: Affine) -> tuple[float, float]:
- """
- Determine the resolution of the affine.
- If it has rotation, the sign of the resolution is lost.
-
- Based on: https://github.com/mapbox/rasterio/blob/6185a4e4ad72b5669066d2d5004bf46d94a6d298/rasterio/_base.pyx#L943-L951
-
- Parameters
- ----------
- affine: :obj:`affine.Affine`
- The affine of the grid.
-
-
- Returns
- --------
- x_resolution: float
- The X resolution of the affine.
- y_resolution: float
- The Y resolution of the affine.
- """
- if not _affine_has_rotation(affine):
- return affine.a, affine.e
- return (
- math.sqrt(affine.a**2 + affine.d**2),
- math.sqrt(affine.b**2 + affine.e**2),
- )
-
-
-def affine_to_coords(
- affine: Affine, width: int, height: int, x_dim: str = "x", y_dim: str = "y"
-) -> dict[str, numpy.ndarray]:
- """Generate 1d pixel centered coordinates from affine.
-
- Based on code from the xarray rasterio backend.
-
- Parameters
- ----------
- affine: :obj:`affine.Affine`
- The affine of the grid.
- width: int
- The width of the grid.
- height: int
- The height of the grid.
- x_dim: str, optional
- The name of the X dimension. Default is 'x'.
- y_dim: str, optional
- The name of the Y dimension. Default is 'y'.
-
- Returns
- -------
- dict: x and y coordinate arrays.
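-
- Examples
- --------
- A minimal sketch with an illustrative 0.1 degree, north-up grid:
-
- >>> from affine import Affine
- >>> coords = affine_to_coords(Affine(0.1, 0.0, -120.0, 0.0, -0.1, 50.0), width=3, height=2)
- >>> coords["x"].shape, coords["y"].shape
- ((3,), (2,))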
-
- """
- transform = affine * affine.translation(0.5, 0.5)
- if affine.is_rectilinear and not _affine_has_rotation(affine):
- x_coords, _ = transform * (numpy.arange(width), numpy.zeros(width))
- _, y_coords = transform * (numpy.zeros(height), numpy.arange(height))
- else:
- x_coords, y_coords = transform * numpy.meshgrid(
- numpy.arange(width),
- numpy.arange(height),
- )
- return {y_dim: y_coords, x_dim: x_coords}
-
-
-def _generate_spatial_coords(
- affine: Affine, width: int, height: int
-) -> dict[Hashable, Any]:
- """get spatial coords in new transform"""
- new_spatial_coords = affine_to_coords(affine, width, height)
- if new_spatial_coords["x"].ndim == 1:
- return {
- "x": xarray.IndexVariable("x", new_spatial_coords["x"]),
- "y": xarray.IndexVariable("y", new_spatial_coords["y"]),
- }
- return {
- "xc": (("y", "x"), new_spatial_coords["x"]),
- "yc": (("y", "x"), new_spatial_coords["y"]),
- }
-
-
-def _get_nonspatial_coords(
- src_data_array: Union[xarray.DataArray, xarray.Dataset]
-) -> dict[Hashable, Union[xarray.Variable, xarray.IndexVariable]]:
- coords: dict[Hashable, Union[xarray.Variable, xarray.IndexVariable]] = {}
- for coord in set(src_data_array.coords) - {
- src_data_array.rio.x_dim,
- src_data_array.rio.y_dim,
- DEFAULT_GRID_MAP,
- }:
- if src_data_array[coord].dims:
- coords[coord] = xarray.IndexVariable(
- src_data_array[coord].dims,
- src_data_array[coord].values,
- src_data_array[coord].attrs,
- )
- else:
- coords[coord] = xarray.Variable(
- src_data_array[coord].dims,
- src_data_array[coord].values,
- src_data_array[coord].attrs,
- )
- return coords
-
-
-def _make_coords(
- src_data_array: Union[xarray.DataArray, xarray.Dataset],
- dst_affine: Affine,
- dst_width: int,
- dst_height: int,
- force_generate: bool = False,
-) -> dict[Hashable, Any]:
- """Generate the coordinates of the new projected `xarray.DataArray`"""
- coords = _get_nonspatial_coords(src_data_array)
- if force_generate or (
- src_data_array.rio.x_dim in src_data_array.coords
- and src_data_array.rio.y_dim in src_data_array.coords
- ):
- new_coords = _generate_spatial_coords(dst_affine, dst_width, dst_height)
- new_coords.update(coords)
- return new_coords
- return coords
-
-
-def _get_data_var_message(obj: Union[xarray.DataArray, xarray.Dataset]) -> str:
- """
- Get message for named data variables.
- """
- try:
- return f" Data variable: {obj.name}" if obj.name else ""
- except AttributeError:
- return ""
-
-
-def _get_spatial_dims(
- obj: Union[xarray.Dataset, xarray.DataArray], var: Union[Any, Hashable]
-) -> tuple[str, str]:
- """
- Retrieve the spatial dimensions of the dataset
- """
- try:
- return obj[var].rio.x_dim, obj[var].rio.y_dim
- except MissingSpatialDimensionError as err:
- try:
- obj[var].rio.set_spatial_dims(
- x_dim=obj.rio.x_dim, y_dim=obj.rio.y_dim, inplace=True
- )
- return obj.rio.x_dim, obj.rio.y_dim
- except MissingSpatialDimensionError:
- raise err from None
-
-
-def _has_spatial_dims(
- obj: Union[xarray.Dataset, xarray.DataArray], var: Union[Any, Hashable]
-) -> bool:
- """
- Check to see if the variable in the Dataset has spatial dimensions
- """
- try:
- # pylint: disable=pointless-statement
- _get_spatial_dims(obj, var)
- except MissingSpatialDimensionError:
- return False
- return True
-
-
-def _order_bounds(
- minx: float,
- miny: float,
- maxx: float,
- maxy: float,
- resolution_x: float,
- resolution_y: float,
-) -> tuple[float, float, float, float]:
- """
- Make sure that the bounds are in the correct order
- """
- if resolution_y < 0:
- top = maxy
- bottom = miny
- else:
- top = miny
- bottom = maxy
- if resolution_x < 0:
- left = maxx
- right = minx
- else:
- left = minx
- right = maxx
-
- return left, bottom, right, top
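- # Hedged sketch (not part of the original module): for a typical north-up grid
- # the y resolution is negative, so maxy is returned as the top edge.
- # >>> _order_bounds(0.0, 0.0, 10.0, 10.0, resolution_x=1.0, resolution_y=-1.0)
- # (0.0, 0.0, 10.0, 10.0)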
-
-
-[docs]class XRasterBase:
- """This is the base class for the GIS extensions for xarray"""
-
- def __init__(self, xarray_obj: Union[xarray.DataArray, xarray.Dataset]):
- self._obj: Union[xarray.DataArray, xarray.Dataset] = xarray_obj
-
- self._x_dim: Optional[Hashable] = None
- self._y_dim: Optional[Hashable] = None
- # Determine the spatial dimensions of the `xarray.DataArray`
- if "x" in self._obj.dims and "y" in self._obj.dims:
- self._x_dim = "x"
- self._y_dim = "y"
- elif "longitude" in self._obj.dims and "latitude" in self._obj.dims:
- self._x_dim = "longitude"
- self._y_dim = "latitude"
- else:
- # look for coordinates with CF attributes
- for coord in self._obj.coords:
- # make sure to only look in 1D coordinates
- # that has the same dimension name as the coordinate
- if self._obj.coords[coord].dims != (coord,):
- continue
- if (self._obj.coords[coord].attrs.get("axis", "").upper() == "X") or (
- self._obj.coords[coord].attrs.get("standard_name", "").lower()
- in ("longitude", "projection_x_coordinate")
- ):
- self._x_dim = coord
- elif (self._obj.coords[coord].attrs.get("axis", "").upper() == "Y") or (
- self._obj.coords[coord].attrs.get("standard_name", "").lower()
- in ("latitude", "projection_y_coordinate")
- ):
- self._y_dim = coord
-
- # properties
- self._count: Optional[int] = None
- self._height: Optional[int] = None
- self._width: Optional[int] = None
- self._crs: Union[rasterio.crs.CRS, None, Literal[False]] = None
-
- @property
- def crs(self) -> Optional[rasterio.crs.CRS]:
- """:obj:`rasterio.crs.CRS`:
- Retrieve projection from :obj:`xarray.Dataset` | :obj:`xarray.DataArray`
- """
- if self._crs is not None:
- return None if self._crs is False else self._crs
-
- # look in wkt attributes to avoid using
- # pyproj CRS if possible for performance
- for crs_attr in ("spatial_ref", "crs_wkt"):
- try:
- self.set_crs(
- self._obj.coords[self.grid_mapping].attrs[crs_attr],
- inplace=True,
- )
- return self._crs
- except KeyError:
- pass
-
- # look in grid_mapping
- try:
- self.set_crs(
- pyproj.CRS.from_cf(self._obj.coords[self.grid_mapping].attrs),
- inplace=True,
- )
- except (KeyError, pyproj.exceptions.CRSError):
- try:
- # look in attrs for 'crs'
- self.set_crs(self._obj.attrs["crs"], inplace=True)
- except KeyError:
- self._crs = False
- return None
- return self._crs
-
- def _get_obj(self, inplace: bool) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- Get the object to modify.
-
- Parameters
- ----------
- inplace: bool
- If True, returns self.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`
- """
- if inplace:
- return self._obj
- obj_copy = self._obj.copy(deep=True)
- # preserve attribute information
- obj_copy.rio._x_dim = self._x_dim
- obj_copy.rio._y_dim = self._y_dim
- obj_copy.rio._width = self._width
- obj_copy.rio._height = self._height
- obj_copy.rio._crs = self._crs
- return obj_copy
-
-[docs] def set_crs(
- self, input_crs: Any, inplace: bool = True
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- Set the CRS value for the Dataset/DataArray without modifying
- the dataset/data array.
-
- Parameters
- ----------
- input_crs: object
- Anything accepted by `rasterio.crs.CRS.from_user_input`.
- inplace: bool, optional
- If True, it will write to the existing dataset. Default is True.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- Dataset with crs attribute.
- """
- crs = crs_from_user_input(input_crs)
- obj = self._get_obj(inplace=inplace)
- obj.rio._crs = crs
- return obj
-
- @property
- def grid_mapping(self) -> str:
- """
- str: The CF grid_mapping attribute. 'spatial_ref' is the default.
- """
- grid_mapping = self._obj.encoding.get(
- "grid_mapping", self._obj.attrs.get("grid_mapping")
- )
- if grid_mapping is not None:
- return grid_mapping
- grid_mapping = DEFAULT_GRID_MAP
- # search the dataset for the grid mapping name
- if hasattr(self._obj, "data_vars"):
- grid_mappings = set()
- for var in self._obj.data_vars:
- if not _has_spatial_dims(self._obj, var):
- continue
- var_grid_mapping = self._obj[var].encoding.get(
- "grid_mapping", self._obj[var].attrs.get("grid_mapping")
- )
- if var_grid_mapping is not None:
- grid_mapping = var_grid_mapping
- grid_mappings.add(grid_mapping)
- if len(grid_mappings) > 1:
- raise RioXarrayError("Multiple grid mappings exist.")
- return grid_mapping
-
-[docs] def write_grid_mapping(
- self, grid_mapping_name: str = DEFAULT_GRID_MAP, inplace: bool = False
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- Write the CF grid_mapping attribute to the encoding.
-
- Parameters
- ----------
- grid_mapping_name: str, optional
- Name of the grid_mapping coordinate.
- inplace: bool, optional
- If True, it will write to the existing dataset. Default is False.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- Modified dataset with CF compliant CRS information.
- """
- data_obj = self._get_obj(inplace=inplace)
- if hasattr(data_obj, "data_vars"):
- for var in data_obj.data_vars:
- try:
- x_dim, y_dim = _get_spatial_dims(data_obj, var)
- except MissingSpatialDimensionError:
- continue
- # remove grid_mapping from attributes if it exists
- # and update the grid_mapping in encoding
- new_attrs = dict(data_obj[var].attrs)
- new_attrs.pop("grid_mapping", None)
- data_obj[var].rio.update_encoding(
- {"grid_mapping": grid_mapping_name}, inplace=True
- ).rio.update_attrs(new_attrs, inplace=True).rio.set_spatial_dims(
- x_dim=x_dim, y_dim=y_dim, inplace=True
- )
- # remove grid_mapping from attributes if it exists
- # and update the grid_mapping in encoding
- new_attrs = dict(data_obj.attrs)
- new_attrs.pop("grid_mapping", None)
- return data_obj.rio.update_encoding(
- {"grid_mapping": grid_mapping_name}, inplace=True
- ).rio.update_attrs(new_attrs, inplace=True)
-
-[docs] def write_crs(
- self,
- input_crs: Optional[Any] = None,
- grid_mapping_name: Optional[str] = None,
- inplace: bool = False,
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- Write the CRS to the dataset in a CF compliant manner.
-
- .. warning:: The grid_mapping attribute is written to the encoding.
-
- Parameters
- ----------
- input_crs: Any
- Anything accepted by `rasterio.crs.CRS.from_user_input`.
- grid_mapping_name: str, optional
- Name of the grid_mapping coordinate to store the CRS information in.
- Default is the grid_mapping name of the dataset.
- inplace: bool, optional
- If True, it will write to the existing dataset. Default is False.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- Modified dataset with CF compliant CRS information.
-
- Examples
- --------
- Write the CRS of the current `xarray` object:
-
- >>> raster.rio.write_crs("epsg:4326", inplace=True)
-
- Write the CRS on a copy:
-
- >>> raster = raster.rio.write_crs("epsg:4326")
- """
- if input_crs is not None:
- data_obj = self.set_crs(input_crs, inplace=inplace)
- else:
- data_obj = self._get_obj(inplace=inplace)
-
- # get original transform
- transform = self._cached_transform()
- # remove old grid mapping coordinate if it exists
- grid_mapping_name = (
- self.grid_mapping if grid_mapping_name is None else grid_mapping_name
- )
- try:
- del data_obj.coords[grid_mapping_name]
- except KeyError:
- pass
-
- if data_obj.rio.crs is None:
- raise MissingCRS(
- "CRS not found. Please set the CRS with 'rio.write_crs()'."
- )
- # add grid mapping coordinate
- data_obj.coords[grid_mapping_name] = xarray.Variable((), 0)
- grid_map_attrs = {}
- if get_option(EXPORT_GRID_MAPPING):
- try:
- grid_map_attrs = pyproj.CRS.from_user_input(data_obj.rio.crs).to_cf()
- except KeyError:
- pass
- # spatial_ref is for compatibility with GDAL
- crs_wkt = data_obj.rio.crs.to_wkt()
- grid_map_attrs["spatial_ref"] = crs_wkt
- grid_map_attrs["crs_wkt"] = crs_wkt
- if transform is not None:
- grid_map_attrs["GeoTransform"] = " ".join(
- [str(item) for item in transform.to_gdal()]
- )
- data_obj.coords[grid_mapping_name].rio.set_attrs(grid_map_attrs, inplace=True)
-
- # remove old crs if exists
- data_obj.attrs.pop("crs", None)
-
- return data_obj.rio.write_grid_mapping(
- grid_mapping_name=grid_mapping_name, inplace=True
- )
-
-[docs] def estimate_utm_crs(self, datum_name: str = "WGS 84") -> rasterio.crs.CRS:
- """Returns the estimated UTM CRS based on the bounds of the dataset.
-
- .. versionadded:: 0.2
-
- .. note:: Requires pyproj 3+
-
- Parameters
- ----------
- datum_name : str, optional
- The name of the datum to use in the query. Default is WGS 84.
-
- Returns
- -------
- rasterio.crs.CRS
- """
- # pylint: disable=import-outside-toplevel
- try:
- from pyproj.aoi import AreaOfInterest
- from pyproj.database import query_utm_crs_info
- except ImportError:
- raise RuntimeError("pyproj 3+ required for estimate_utm_crs.") from None
-
- if self.crs is None:
- raise RuntimeError("crs must be set to estimate UTM CRS.")
-
- # ensure using geographic coordinates
- if self.crs.is_geographic: # pylint: disable=no-member
- minx, miny, maxx, maxy = self.bounds(recalc=True)
- else:
- minx, miny, maxx, maxy = self.transform_bounds("EPSG:4326", recalc=True)
-
- x_center = numpy.mean([minx, maxx]).item()
- y_center = numpy.mean([miny, maxy]).item()
-
- utm_crs_list = query_utm_crs_info(
- datum_name=datum_name,
- area_of_interest=AreaOfInterest(
- west_lon_degree=x_center,
- south_lat_degree=y_center,
- east_lon_degree=x_center,
- north_lat_degree=y_center,
- ),
- )
- try:
- return CRS.from_epsg(utm_crs_list[0].code)
- except IndexError:
- raise RuntimeError("Unable to determine UTM CRS") from None
-
- def _cached_transform(self) -> Optional[Affine]:
- """
- Get the transform from:
- 1. The GeoTransform metadata property in the grid mapping
- 2. The transform attribute.
- """
- try:
- # look in grid_mapping
- transform = numpy.fromstring(
- self._obj.coords[self.grid_mapping].attrs["GeoTransform"], sep=" "
- )
- # Calling .tolist() to assure the arguments are Python float and JSON serializable
- return Affine.from_gdal(*transform.tolist())
-
- except KeyError:
- try:
- return Affine(*self._obj.attrs["transform"][:6])
- except KeyError:
- pass
- return None
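- # Hedged sketch (not part of the original source): the GeoTransform string holds
- # the six GDAL-ordered coefficients "c a b f d e"; Affine.from_gdal reorders them.
- # >>> Affine.from_gdal(0.0, 30.0, 0.0, 300.0, 0.0, -30.0)
- # Affine(30.0, 0.0, 0.0,
- #        0.0, -30.0, 300.0)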
-
-[docs] def write_transform(
- self,
- transform: Optional[Affine] = None,
- grid_mapping_name: Optional[str] = None,
- inplace: bool = False,
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- .. versionadded:: 0.0.30
-
- Write the GeoTransform to the dataset where GDAL can read it in.
-
- https://gdal.org/drivers/raster/netcdf.html#georeference
-
- Parameters
- ----------
- transform: affine.Affine, optional
- The transform of the dataset. If not provided, it will be calculated.
- grid_mapping_name: str, optional
- Name of the grid_mapping coordinate to store the transform information in.
- Default is the grid_mapping name of the dataset.
- inplace: bool, optional
- If True, it will write to the existing dataset. Default is False.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- Modified dataset with Geo Transform written.
- """
- transform = transform or self.transform(recalc=True)
- data_obj = self._get_obj(inplace=inplace)
- # delete the old attribute to prevent confusion
- data_obj.attrs.pop("transform", None)
- grid_mapping_name = (
- self.grid_mapping if grid_mapping_name is None else grid_mapping_name
- )
- try:
- grid_map_attrs = data_obj.coords[grid_mapping_name].attrs.copy()
- except KeyError:
- data_obj.coords[grid_mapping_name] = xarray.Variable((), 0)
- grid_map_attrs = data_obj.coords[grid_mapping_name].attrs.copy()
- grid_map_attrs["GeoTransform"] = " ".join(
- [str(item) for item in transform.to_gdal()]
- )
- data_obj.coords[grid_mapping_name].rio.set_attrs(grid_map_attrs, inplace=True)
- return data_obj.rio.write_grid_mapping(
- grid_mapping_name=grid_mapping_name, inplace=True
- )
-
-[docs] def transform(self, recalc: bool = False) -> Affine:
- """
- Parameters
- ----------
- recalc: bool, optional
- If True, it will re-calculate the transform instead of using
- the cached transform.
-
- Returns
- -------
- :obj:`affine.Affine`:
- The affine of the :obj:`xarray.Dataset` | :obj:`xarray.DataArray`
- """
- transform = self._cached_transform()
- if transform and (
- not transform.is_rectilinear or _affine_has_rotation(transform)
- ):
- if recalc:
- warnings.warn(
- "Transform that is non-rectilinear or with rotation found. "
- "Unable to recalculate."
- )
- return transform
-
- try:
- src_left, _, _, src_top = self._unordered_bounds(recalc=recalc)
- src_resolution_x, src_resolution_y = self.resolution(recalc=recalc)
- except (DimensionMissingCoordinateError, DimensionError):
- return Affine.identity() if transform is None else transform
- return Affine.translation(src_left, src_top) * Affine.scale(
- src_resolution_x, src_resolution_y
- )
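- # Hedged sketch (not part of the original source): recalculation composes a
- # translation to the grid's top-left corner with a scale by the pixel size.
- # >>> Affine.translation(0.0, 300.0) * Affine.scale(30.0, -30.0)
- # Affine(30.0, 0.0, 0.0,
- #        0.0, -30.0, 300.0)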
-
-[docs] def write_coordinate_system(
- self, inplace: bool = False
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- Write the coordinate system CF metadata.
-
- .. versionadded:: 0.0.30
-
- Parameters
- ----------
- inplace: bool, optional
- If True, it will write to the existing dataset. Default is False.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- The dataset with the CF coordinate system attributes added.
- """
- data_obj = self._get_obj(inplace=inplace)
- # add metadata to x,y coordinates
- is_projected = data_obj.rio.crs and data_obj.rio.crs.is_projected
- is_geographic = data_obj.rio.crs and data_obj.rio.crs.is_geographic
- x_coord_attrs = dict(data_obj.coords[self.x_dim].attrs)
- x_coord_attrs["axis"] = "X"
- y_coord_attrs = dict(data_obj.coords[self.y_dim].attrs)
- y_coord_attrs["axis"] = "Y"
- if is_projected:
- units = None
- if hasattr(data_obj.rio.crs, "linear_units_factor"):
- unit_factor = data_obj.rio.crs.linear_units_factor[-1]
- if unit_factor != 1:
- units = f"{unit_factor} metre"
- else:
- units = "metre"
- # X metadata
- x_coord_attrs["long_name"] = "x coordinate of projection"
- x_coord_attrs["standard_name"] = "projection_x_coordinate"
- if units:
- x_coord_attrs["units"] = units
- # Y metadata
- y_coord_attrs["long_name"] = "y coordinate of projection"
- y_coord_attrs["standard_name"] = "projection_y_coordinate"
- if units:
- y_coord_attrs["units"] = units
- elif is_geographic:
- # X metadata
- x_coord_attrs["long_name"] = "longitude"
- x_coord_attrs["standard_name"] = "longitude"
- x_coord_attrs["units"] = "degrees_east"
- # Y metadata
- y_coord_attrs["long_name"] = "latitude"
- y_coord_attrs["standard_name"] = "latitude"
- y_coord_attrs["units"] = "degrees_north"
- data_obj.coords[self.y_dim].attrs = y_coord_attrs
- data_obj.coords[self.x_dim].attrs = x_coord_attrs
- return data_obj
-
-[docs] def set_attrs(
- self, new_attrs: dict, inplace: bool = False
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- Set the attributes of the dataset/dataarray and reset
- rioxarray properties to re-search for them.
-
- Parameters
- ----------
- new_attrs: dict
- A dictionary of new attributes.
- inplace: bool, optional
- If True, it will write to the existing dataset. Default is False.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- Modified dataset with new attributes.
- """
- data_obj = self._get_obj(inplace=inplace)
- # set the attributes
- data_obj.attrs = new_attrs
- # reset rioxarray properties depending
- # on attributes to be generated
- data_obj.rio._nodata = None
- data_obj.rio._crs = None
- return data_obj
-
-[docs] def update_attrs(
- self, new_attrs: dict, inplace: bool = False
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- Update the attributes of the dataset/dataarray and reset
- rioxarray properties to re-search for them.
-
- Parameters
- ----------
- new_attrs: dict
- A dictionary of new attributes to update with.
- inplace: bool, optional
- If True, it will write to the existing dataset. Default is False.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- Modified dataset with updated attributes.
- """
- data_attrs = dict(self._obj.attrs)
- data_attrs.update(**new_attrs)
- return self.set_attrs(data_attrs, inplace=inplace)
-
-[docs] def set_encoding(
- self, new_encoding: dict, inplace: bool = False
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- Set the encoding of the dataset/dataarray and reset
- rioxarray properties to re-search for them.
-
- .. versionadded:: 0.4
-
- Parameters
- ----------
- new_encoding: dict
- A dictionary for encoding.
- inplace: bool, optional
- If True, it will write to the existing dataset. Default is False.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- Modified dataset with new attributes.
- """
- data_obj = self._get_obj(inplace=inplace)
- # set the attributes
- data_obj.encoding = new_encoding
- # reset rioxarray properties depending
- # on attributes to be generated
- data_obj.rio._nodata = None
- data_obj.rio._crs = None
- return data_obj
-
-[docs] def update_encoding(
- self, new_encoding: dict, inplace: bool = False
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- Update the encoding of the dataset/dataarray and reset
- rioxarray properties to re-search for them.
-
- .. versionadded:: 0.4
-
- Parameters
- ----------
- new_encoding: dict
- A dictionary with encoding values to update with.
- inplace: bool, optional
- If True, it will write to the existing dataset. Default is False.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- Modified dataset with updated attributes.
- """
- data_encoding = dict(self._obj.encoding)
- data_encoding.update(**new_encoding)
- return self.set_encoding(data_encoding, inplace=inplace)
-
-[docs] def set_spatial_dims(
- self, x_dim: str, y_dim: str, inplace: bool = True
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- This sets the spatial dimensions of the dataset.
-
- Parameters
- ----------
- x_dim: str
- The name of the x dimension.
- y_dim: str
- The name of the y dimension.
- inplace: bool, optional
- If True, it will modify the dataset in place.
- Otherwise it will return a modified copy.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- Dataset with spatial dimensions set.
- """
-
- data_obj = self._get_obj(inplace=inplace)
- if x_dim in data_obj.dims:
- data_obj.rio._x_dim = x_dim
- else:
- raise MissingSpatialDimensionError(
- f"x dimension ({x_dim}) not found.{_get_data_var_message(data_obj)}"
- )
- if y_dim in data_obj.dims:
- data_obj.rio._y_dim = y_dim
- else:
- raise MissingSpatialDimensionError(
- f"y dimension ({y_dim}) not found.{_get_data_var_message(data_obj)}"
- )
- return data_obj
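- # Hedged usage sketch (not part of the original source); assumes the object's
- # dimensions are named "lon" and "lat" rather than "x"/"y":
- # >>> raster = raster.rio.set_spatial_dims(x_dim="lon", y_dim="lat", inplace=False)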
-
- @property
- def x_dim(self) -> Hashable:
- """Hashable: The dimension for the X-axis."""
- if self._x_dim is not None:
- return self._x_dim
- raise MissingSpatialDimensionError(
- "x dimension not found. 'rio.set_spatial_dims()' or "
- "using 'rename()' to change the dimension name to 'x' can address this."
- f"{_get_data_var_message(self._obj)}"
- )
-
- @property
- def y_dim(self) -> Hashable:
- """Hashable: The dimension for the Y-axis."""
- if self._y_dim is not None:
- return self._y_dim
- raise MissingSpatialDimensionError(
- "y dimension not found. 'rio.set_spatial_dims()' or "
- "using 'rename()' to change the dimension name to 'y' can address this."
- f"{_get_data_var_message(self._obj)}"
- )
-
- @property
- def width(self) -> int:
- """int: Returns the width of the dataset (x dimension size)"""
- if self._width is not None:
- return self._width
- self._width = self._obj[self.x_dim].size
- return self._width
-
- @property
- def height(self) -> int:
- """int: Returns the height of the dataset (y dimension size)"""
- if self._height is not None:
- return self._height
- self._height = self._obj[self.y_dim].size
- return self._height
-
- @property
- def shape(self) -> tuple[int, int]:
- """tuple(int, int): Returns the shape (height, width)"""
- return (self.height, self.width)
-
- def _check_dimensions(self) -> Optional[str]:
- """
- This function validates that the dimensions are 2D/3D and
- that they are in the proper order.
-
- Returns
- -------
- str or None: Name of the extra dimension.
- """
- extra_dims = tuple(set(list(self._obj.dims)) - {self.x_dim, self.y_dim})
- if len(extra_dims) > 1:
- raise TooManyDimensions(
- "Only 2D and 3D data arrays supported."
- f"{_get_data_var_message(self._obj)}"
- )
- if extra_dims and self._obj.dims != (extra_dims[0], self.y_dim, self.x_dim):
- dim_info: tuple = (extra_dims[0], self.y_dim, self.x_dim)
- raise InvalidDimensionOrder(
- f"Invalid dimension order. Expected order: {dim_info}. "
- f"You can use `DataArray.transpose{dim_info}`"
- " to reorder your dimensions."
- f"{_get_data_var_message(self._obj)}"
- )
- if not extra_dims and self._obj.dims != (self.y_dim, self.x_dim):
- dim_info = (self.y_dim, self.x_dim)
- raise InvalidDimensionOrder(
- f"Invalid dimension order. Expected order: {dim_info}. "
- f"You can use `DataArray.transpose{dim_info}`"
- " to reorder your dimensions."
- f"{_get_data_var_message(self._obj)}"
- )
- return str(extra_dims[0]) if extra_dims else None
-
- @property
- def count(self) -> int:
- """int: Returns the band count (z dimension size)"""
- if self._count is not None:
- return self._count
- extra_dim = self._check_dimensions()
- self._count = 1
- if extra_dim is not None:
- self._count = self._obj[extra_dim].size
- return self._count
-
- def _internal_bounds(self) -> tuple[float, float, float, float]:
- """Determine the internal bounds of the `xarray.DataArray`"""
- if self.x_dim not in self._obj.coords:
- raise DimensionMissingCoordinateError(f"{self.x_dim} missing coordinates.")
- if self.y_dim not in self._obj.coords:
- raise DimensionMissingCoordinateError(f"{self.y_dim} missing coordinates.")
- try:
- left = float(self._obj[self.x_dim][0])
- right = float(self._obj[self.x_dim][-1])
- top = float(self._obj[self.y_dim][0])
- bottom = float(self._obj[self.y_dim][-1])
- except IndexError:
- raise NoDataInBounds(
- "Unable to determine bounds from coordinates."
- f"{_get_data_var_message(self._obj)}"
- ) from None
- return left, bottom, right, top
-
-[docs] def resolution(self, recalc: bool = False) -> tuple[float, float]:
- """
- Determine the resolution of the grid.
- If the transformation has rotation, the sign of the resolution is lost.
-
- Parameters
- ----------
- recalc: bool, optional
- Will force the resolution to be recalculated instead of using the
- transform attribute.
-
- Returns
- -------
- x_resolution, y_resolution: float
- The resolution of the `xarray.DataArray` | `xarray.Dataset`
- """
- transform = self._cached_transform()
-
- if (
- not recalc or self.width == 1 or self.height == 1
- ) and transform is not None:
- return _resolution(transform)
-
- # if the coordinates of the spatial dimensions are missing
- # use the cached transform resolution
- try:
- left, bottom, right, top = self._internal_bounds()
- except DimensionMissingCoordinateError:
- if transform is None:
- raise
- return _resolution(transform)
-
- if self.width == 1 or self.height == 1:
- raise OneDimensionalRaster(
- "Only 1 dimenional array found. Cannot calculate the resolution."
- f"{_get_data_var_message(self._obj)}"
- )
-
- resolution_x = (right - left) / (self.width - 1)
- resolution_y = (bottom - top) / (self.height - 1)
- return resolution_x, resolution_y
-
- def _unordered_bounds(
- self, recalc: bool = False
- ) -> tuple[float, float, float, float]:
- """
- Unordered bounds.
-
- Parameters
- ----------
- recalc: bool, optional
- Will force the bounds to be recalculated instead of using the
- transform attribute.
-
- Returns
- -------
- left, bottom, right, top: float
- Outermost coordinates of the `xarray.DataArray` | `xarray.Dataset`.
- """
- resolution_x, resolution_y = self.resolution(recalc=recalc)
-
- try:
- # attempt to get bounds from xarray coordinate values
- left, bottom, right, top = self._internal_bounds()
- left -= resolution_x / 2.0
- right += resolution_x / 2.0
- top -= resolution_y / 2.0
- bottom += resolution_y / 2.0
- except DimensionMissingCoordinateError as error:
- transform = self._cached_transform()
- if not transform:
- raise RioXarrayError("Transform not able to be determined.") from error
- left = transform.c
- top = transform.f
- right = left + resolution_x * self.width
- bottom = top + resolution_y * self.height
-
- return left, bottom, right, top
-
-[docs] def bounds(self, recalc: bool = False) -> tuple[float, float, float, float]:
- """
- Parameters
- ----------
- recalc: bool, optional
- Will force the bounds to be recalculated instead of using the
- transform attribute.
-
- Returns
- -------
- left, bottom, right, top: float
- Outermost coordinates of the `xarray.DataArray` | `xarray.Dataset`.
- """
- return _order_bounds(
- *self._unordered_bounds(recalc=recalc),
- *self.resolution(recalc=recalc),
- )
-
-[docs] def isel_window(
- self, window: rasterio.windows.Window, pad: bool = False
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- Use a rasterio.windows.Window to select a subset of the data.
-
- .. versionadded:: 0.6.0 pad
-
- .. warning:: Float indices are converted to integers.
-
- Parameters
- ----------
- window: :class:`rasterio.windows.Window`
- The window of the dataset to read.
- pad: bool, default=False
- Set to True to expand returned DataArray to dimensions of the window
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- The data in the window.
- """
- (row_start, row_stop), (col_start, col_stop) = window.toranges()
- row_start = 0 if row_start < 0 else math.floor(row_start)
- row_stop = 0 if row_stop < 0 else math.ceil(row_stop)
- col_start = 0 if col_start < 0 else math.floor(col_start)
- col_stop = 0 if col_stop < 0 else math.ceil(col_stop)
- row_slice = slice(int(row_start), int(row_stop))
- col_slice = slice(int(col_start), int(col_stop))
- array_subset = (
- self._obj.isel({self.y_dim: row_slice, self.x_dim: col_slice})
- .copy() # this is to prevent sharing coordinates with the original dataset
- .rio.set_spatial_dims(x_dim=self.x_dim, y_dim=self.y_dim, inplace=True)
- .rio.write_transform(
- transform=rasterio.windows.transform(
- rasterio.windows.Window.from_slices(
- rows=row_slice,
- cols=col_slice,
- width=self.width,
- height=self.height,
- ),
- self.transform(recalc=True),
- ),
- inplace=True,
- )
- )
- if pad:
- return array_subset.rio.pad_box(
- *rasterio.windows.bounds(window, self.transform(recalc=True))
- )
- return array_subset
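- # Hedged usage sketch (not part of the original source); assumes `raster` is a
- # DataArray of at least 256x256 pixels:
- # >>> from rasterio.windows import Window
- # >>> subset = raster.rio.isel_window(Window(col_off=0, row_off=0, width=256, height=256))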
-
-[docs] def slice_xy(
- self,
- minx: float,
- miny: float,
- maxx: float,
- maxy: float,
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """Slice the array by x,y bounds.
-
- Parameters
- ----------
- minx: float
- Minimum bound for x coordinate.
- miny: float
- Minimum bound for y coordinate.
- maxx: float
- Maximum bound for x coordinate.
- maxy: float
- Maximum bound for y coordinate.
-
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- The data in the slice.
- """
- left, bottom, right, top = self._internal_bounds()
- if top > bottom:
- y_slice = slice(maxy, miny)
- else:
- y_slice = slice(miny, maxy)
-
- if left > right:
- x_slice = slice(maxx, minx)
- else:
- x_slice = slice(minx, maxx)
-
- subset = (
- self._obj.sel({self.x_dim: x_slice, self.y_dim: y_slice})
- .copy() # this is to prevent sharing coordinates with the original dataset
- .rio.set_spatial_dims(x_dim=self.x_dim, y_dim=self.y_dim, inplace=True)
- .rio.write_transform(inplace=True)
- )
- return subset
-
-[docs] def transform_bounds(
- self, dst_crs: Any, densify_pts: int = 21, recalc: bool = False
- ) -> tuple[float, float, float, float]:
- """Transform bounds from src_crs to dst_crs.
-
- Optionally densifying the edges (to account for nonlinear transformations
- along these edges) and extracting the outermost bounds.
-
- Note: this does not account for the antimeridian.
-
- Parameters
- ----------
- dst_crs: str, :obj:`rasterio.crs.CRS`, or dict
- Target coordinate reference system.
- densify_pts: uint, optional
- Number of points to add to each edge to account for nonlinear
- edges produced by the transform process. Large numbers will produce
- worse performance. Default: 21 (gdal default).
- recalc: bool, optional
- Will force the bounds to be recalculated instead of using the transform
- attribute.
-
- Returns
- -------
- left, bottom, right, top: float
- Outermost coordinates in target coordinate reference system.
- """
- return rasterio.warp.transform_bounds(
- self.crs, dst_crs, *self.bounds(recalc=recalc), densify_pts=densify_pts
- )
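- # Hedged usage sketch (not part of the original source): reproject the raster's
- # bounding box to geographic coordinates.
- # >>> raster.rio.transform_bounds("EPSG:4326")
- # -> (left, bottom, right, top) of the data in degrees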
-
-[docs] def write_gcps(
- self,
- gcps: Iterable[GroundControlPoint],
- gcp_crs: Any,
- grid_mapping_name: Optional[str] = None,
- inplace: bool = False,
- ) -> Union[xarray.Dataset, xarray.DataArray]:
- """
- Write the GroundControlPoints to the dataset.
-
- https://rasterio.readthedocs.io/en/latest/topics/georeferencing.html#ground-control-points
-
- Parameters
- ----------
- gcps: list of :obj:`rasterio.control.GroundControlPoint`
- The Ground Control Points to integrate to the dataset.
- gcp_crs: str, :obj:`rasterio.crs.CRS`, or dict
- Coordinate reference system for the GCPs.
- grid_mapping_name: str, optional
- Name of the grid_mapping coordinate to store the GCPs information in.
- Default is the grid_mapping name of the dataset.
- inplace: bool, optional
- If True, it will write to the existing dataset. Default is False.
-
- Returns
- -------
- :obj:`xarray.Dataset` | :obj:`xarray.DataArray`:
- Modified dataset with Ground Control Points written.
- """
- grid_mapping_name = (
- self.grid_mapping if grid_mapping_name is None else grid_mapping_name
- )
- data_obj = self._get_obj(inplace=True)
-
- data_obj = data_obj.rio.write_crs(
- gcp_crs, grid_mapping_name=grid_mapping_name, inplace=inplace
- )
- geojson_gcps = _convert_gcps_to_geojson(gcps)
- data_obj.coords[grid_mapping_name].attrs["gcps"] = geojson_gcps
- return data_obj
-
-[docs] def get_gcps(self) -> Optional[list[GroundControlPoint]]:
- """
- Get the GroundControlPoints from the dataset.
-
- https://rasterio.readthedocs.io/en/latest/topics/georeferencing.html#ground-control-points
-
- Returns
- -------
- list of :obj:`rasterio.control.GroundControlPoint` or None
- The Ground Control Points from the dataset or None if not applicable
- """
- try:
- geojson_gcps = self._obj.coords[self.grid_mapping].attrs["gcps"]
- except (KeyError, AttributeError):
- return None
-
- gcps = [
- GroundControlPoint(
- x=gcp["geometry"]["coordinates"][0],
- y=gcp["geometry"]["coordinates"][1],
- z=gcp["geometry"]["coordinates"][2],
- row=gcp["properties"]["row"],
- col=gcp["properties"]["col"],
- id=gcp["properties"]["id"],
- info=gcp["properties"]["info"],
- )
- for gcp in geojson_gcps["features"]
- ]
- return gcps
-
-
-def _convert_gcps_to_geojson(
- gcps: Iterable[GroundControlPoint],
-) -> dict:
- """
- Convert GCPs to geojson.
-
- Parameters
- ----------
- gcps: The list of GroundControlPoint instances.
-
- Returns
- -------
- A FeatureCollection dict.
- """
- features = [
- {
- "type": "Feature",
- "properties": {
- "id": gcp.id,
- "info": gcp.info,
- "row": gcp.row,
- "col": gcp.col,
- },
- "geometry": {"type": "Point", "coordinates": [gcp.x, gcp.y, gcp.z]},
- }
- for gcp in gcps
- ]
- return {"type": "FeatureCollection", "features": features}
-
diff --git a/0.15.0/authors.html b/0.15.0/authors.html deleted file mode 100644 index 6cf26e68..00000000 --- a/0.15.0/authors.html +++ /dev/null @@ -1,176 +0,0 @@
- - - -Alan D. Snow 💻 🤔 💬 🐛 📖 💡 🚧 👀 ⚠️ |
- Alfredo Delos Santos 💻 🤔 👀 |
- David Hoese 🤔 👀 💻 ⚠️ |
- Justin Gruca 👀 |
- Vincent Sarago 📖 ⚠️ |
- Rich Signell 🤔 |
- pmallas 💻 🤔 |
-
David Brochart 💻 ⚠️ 🤔 📖 |
- Taher Chegini 💻 🐛 |
- Joe Hamman 💻 🐛 |
- Tom Augspurger 💻 🐛 🤔 📖 |
- RichardScottOZ 📖 |
- Ray Bell 📖 |
- Alessandro Amici 💻 📖 ⚠️ |
-
remi-braun 📖 |
- Scott Henderson 🐛 💻 ⚠️ |
- Andrew Annex 💻 📖 ⚠️ |
- Fred Bunt 🐛 ⚠️ 💻 |
- Markus Zehner 🐛 💻 ⚠️ 🤔 |
- Seth Miller 💻 📖 ⚠️ |
- Martin Raspaud 💻 ⚠️ 📖 🤔 🐛 |
-
Mauricio Cordeiro 🐛 💻 |
- GBallesteros 🐛 💻 ⚠️ |
- apiwat-chantawibul 📖 |
- Mike Taves 🚧 |
- Sangzi Liang 📖 |
- jonasViehweger 💻 🐛 |
- Carlos H Brandt 📖 |
-
Jessica Scheick 📖 |
- clausmichele 👀 |
- Seth Caldwell 🐛 💻 |
- stefank0 📖 |
- Ian Carroll 🚧 |
- Yvonne Fröhlich 📖 |
- Kirill Kouzoubov 💻 |
-
This project follows the all-contributors specification. Contributions of any kind welcome!
Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given.
-You can contribute in many ways:
-Report bugs at https://github.com/corteva/rioxarray/issues.
-If you are reporting a bug, please include:
-Your operating system name and version.
Any details about your local setup that might be helpful in troubleshooting.
Detailed steps to reproduce the bug.
Look through the GitHub issues for bugs. Anything tagged with “bug” and “help -wanted” is open to whoever wants to implement it.
-Look through the GitHub issues for features. Anything tagged with “enhancement” -and “help wanted” is open to whoever wants to implement it.
-rioxarray could always use more documentation, whether as part of the -official rioxarray docs, in docstrings, or even on the web in blog posts, -articles, and such.
-The best way to send feedback is to file an issue at https://github.com/corteva/rioxarray/issues.
-If you are proposing a feature:
-Explain in detail how it would work.
Keep the scope as narrow as possible, to make it easier to implement.
Remember that this is a volunteer-driven project, and that contributions -are welcome :)
Ready to contribute? Here’s how to set up rioxarray for local development.
-Fork the rioxarray repo on GitHub.
Clone your fork locally:
-$ git clone git@github.com:your_name_here/rioxarray.git
-
Create a python virtual environment
Using conda:
-$ cd rioxarray/
-$ conda env create
-$ conda activate rioxarray
-
Using python:
-$ cd rioxarray/
-$ python -m venv venv
-$ . venv/bin/activate
-
Install your local copy into a virtualenv.
---$ pip install -e .[dev]
-
Setup pre-commit hooks:
-$ pre-commit install
-
Create a branch for local development:
-$ git checkout -b name-of-your-bugfix-or-feature
-
Now you can make your changes locally.
-When you’re done making changes, check that the tests pass:
-$ pytest
-
Commit your changes and push your branch to GitHub (this should trigger pre-commit checks):
-$ git add .
-$ git commit -m "Your detailed description of your changes."
-$ git push origin name-of-your-bugfix-or-feature
-
Submit a pull request through the GitHub website.
This assumes you have cloned the rioxarray repository and are in the base folder.
-Build the docker image
docker build -t rioxarray .
-
Run the tests
docker run --rm \
- -v $PWD/test/:/app/test \
- -t rioxarray \
- 'source /venv/bin/activate && python -m pytest'
-
Before you submit a pull request, check that it meets these guidelines:
-The pull request should include tests.
If the pull request adds functionality, the docs should be updated. Put -your new functionality into a function with a docstring, and add the -feature to the list in README.rst.
The pull request should work for Python 3.9-3.11.
To run a subset of tests:
-$ pytest test/unit/test_show_versions.py::test_get_main_info
-
See docs for rioxarray.open_rasterio
-[1]:
-
import rioxarray
-
-%matplotlib inline
-
[2]:
-
# from https://openaerialmap.org/
-cog_url = (
- "https://oin-hotosm.s3.amazonaws.com/"
- "5d7dad0becaf880008a9bc88/0/5d7dad0becaf880008a9bc89.tif"
-)
-
[3]:
-
rds = rioxarray.open_rasterio(cog_url, masked=True, overview_level=4)
-
[4]:
-
rds
-
[4]:
-
-<xarray.DataArray (band: 3, y: 312, x: 688)>
-[643968 values with dtype=float64]
-Coordinates:
- * band (band) int64 1 2 3
- * y (y) float64 4.34e+06 4.34e+06 4.34e+06 ... 4.339e+06 4.339e+06
- * x (x) float64 -1.333e+07 -1.333e+07 ... -1.333e+07 -1.333e+07
- spatial_ref int64 0
-Attributes:
- transform: (1.194328566955879, 0.0, -13334019.180693429, 0.0, -1.1943...
- scales: (1.0, 1.0, 1.0)
- offsets: (0.0, 0.0, 0.0)
- grid_mapping: spatial_ref
-
[5]:
-
rds.astype("int").plot.imshow(rgb="band")
-
-Clipping input data to the valid range for imshow with RGB data ([0..1] for floats or [0..255] for integers).
-
[5]:
-
-<matplotlib.image.AxesImage at 0x7fa30cc3aeb8>
-
[1]:
-
import rioxarray # for the extension to load
-import xarray
-
-%matplotlib inline
-
[2]:
-
xds = xarray.open_dataarray("../../test/test_data/input/MODIS_ARRAY.nc")
-
[3]:
-
xds
-
[3]:
-
-<xarray.DataArray (y: 200, x: 200)>
-array([[ nan, nan, nan, ..., 656., 656., 554.],
- [ nan, nan, nan, ..., 694., 694., 642.],
- [ nan, nan, nan, ..., 456., 575., 642.],
- ...,
- [993., 817., 817., ..., 471., 479., 498.],
- [893., 893., 816., ..., 479., 479., 469.],
- [816., 816., 832., ..., 515., 469., 485.]], dtype=float32)
-Coordinates:
- * y (y) float64 5.05e+06 5.05e+06 5.05e+06 ... 5.004e+06 5.004e+06
- * x (x) float64 -7.274e+06 -7.274e+06 ... -7.228e+06 -7.228e+06
-Attributes:
- crs: +a=6371007.181 +b=6371007.181 +lon_0=0 +no_defs +proj=sinu +u...
- res: [231.65635826 231.65635826]
- is_tiled: 0
- nodata: -28672.0
- transform: [ 2.31656358e+02 0.00000000e+00 -7.27400965e+06 0.00000000e...
[4]:
-
xds.plot()
-
[4]:
-
-<matplotlib.collections.QuadMesh at 0x7f6aa6241360>
-
See docs for rio.clip_box
:
[5]:
-
xdsc = xds.rio.clip_box(
- minx=-7272967.1958741,
- miny=5048602.84382404,
- maxx=-7272503.88315758,
- maxy=5049066.15654056,
-)
-
[6]:
-
xdsc.plot()
-
[6]:
-
-<matplotlib.collections.QuadMesh at 0x7f6a9e1c64a0>
-
You can also clip using bounds in a CRS different from the dataset if you pass in the crs
kwarg (requires rioxarray 0.12+):
[7]:
-
xdscn = xds.rio.clip_box(
- minx=-93.1558,
- miny=45.403,
- maxx=-93.1557,
- maxy=45.4065,
- crs="EPSG:4326",
-)
-
[8]:
-
xdscn.plot()
-
[8]:
-
-<matplotlib.collections.QuadMesh at 0x7f6a9c8815a0>
-
API Reference for rio.clip
:
[1]:
-
import rioxarray
-
-%matplotlib inline
-
See docs for rioxarray.open_rasterio
-Notes:
-masked=True
will convert from integer to float64
and fill with NaN
. If this behavior is not desired, you can skip this.
[2]:
-
xds = rioxarray.open_rasterio(
- "../../test/test_data/compare/small_dem_3m_merged.tif",
- masked=True,
-)
-
[3]:
-
xds.plot()
-
[3]:
-
-<matplotlib.collections.QuadMesh at 0x7fc957fb1ac0>
-
By default, it assumes that the CRS of the geometry is the same as the CRS of the dataset. If it is different, make sure to pass in the CRS of the geometry.
-[4]:
-
geometries = [
- {
- 'type': 'Polygon',
- 'coordinates': [[
- [425499.18381405267, 4615331.540546387],
- [425499.18381405267, 4615478.540546387],
- [425526.18381405267, 4615478.540546387],
- [425526.18381405267, 4615331.540546387],
- [425499.18381405267, 4615331.540546387]
- ]]
- }
-]
-clipped = xds.rio.clip(geometries)
-
[5]:
-
clipped.plot()
-
[5]:
-
-<matplotlib.collections.QuadMesh at 0x7fc957829d90>
-
[6]:
-
clipped.rio.to_raster("clipped.tif", compress='LZMA', tiled=True, dtype="int32")
-
[7]:
-
import geopandas
-from shapely.geometry import box
-
[8]:
-
geodf = geopandas.GeoDataFrame(
- geometry=[
- box(425499.18381405267, 4615331.540546387, 425526.18381405267, 4615478.540546387)
- ],
- crs="EPSG:26915"
-)
-
[9]:
-
clipped = xds.rio.clip(geodf.geometry.values, geodf.crs, drop=False, invert=True)
-# Note: If you have rasterio < 1.2 you will need convert the geometries to dict-like objects if the projection
-# of the geometries differ from the raster. For example:
-#
-# from shapely.geometry import mapping
-# geometries = geodf.geometry.apply(mapping)
-
[10]:
-
clipped.plot()
-
[10]:
-
-<matplotlib.collections.QuadMesh at 0x7fc9572aa100>
-
[11]:
-
clipped.rio.to_raster("clipped_invert.tif", compress='LZMA', tiled=True, dtype="int32")
-
Note: Loading from disk will likely only work directly after opening a raster with rioxarray.open_rasterio
-The clip operation needs the full raster loaded with the default method. This can be an issue if you don’t have enough memory (RAM) on you machine. If this is something you have run into, it is recommended to use the from_disk=True
option. This option uses rasterio.mask.mask when loading the data if possible.
But be careful, these two methods, as they use different core functions, can have different outputs: small discrepencies may appear on the borders (1 pixel added or removed on some borders, see issue #310)
-Alternatively, you can also use rio.clip_box
followed by rio.clip
for a more consistent memory efficient clip operation.
[12]:
-
geometries = [
- {
- 'type': 'Polygon',
- 'coordinates': [[
- [425499.18381405267, 4615331.540546387],
- [425499.18381405267, 4615478.540546387],
- [425526.18381405267, 4615478.540546387],
- [425526.18381405267, 4615331.540546387],
- [425499.18381405267, 4615331.540546387]
- ]]
- }
-]
-
-clipped = rioxarray.open_rasterio(
- "../../test/test_data/compare/small_dem_3m_merged.tif",
- masked=True,
-).rio.clip(geometries, from_disk=True)
-
[13]:
-
clipped.plot()
-
[13]:
-
-<matplotlib.collections.QuadMesh at 0x7fc9571db850>
-
Often, it is desirable to take a variable (band) out of your dataset and export it to a raster. This is possible with the rio.to_raster()
method. It does most of the work for you so you don’t have to.
Note: The rio.to_raster()
method only works on a 2-dimensional or 3-dimensional xarray.DataArray
or a 2-dimensional xarray.Dataset
.
API Reference:
-DataArray: rio.to_raster()
Dataset: rio.to_raster()
[1]:
-
import rioxarray
-
See docs for rioxarray.open_rasterio
-[2]:
-
rds = rioxarray.open_rasterio(
- "../../test/test_data/input/PLANET_SCOPE_3D.nc",
-)
-rds
-
[2]:
-
<xarray.Dataset> -Dimensions: (y: 10, x: 10, time: 2) -Coordinates: - * y (y) float64 8.085e+06 8.085e+06 ... 8.085e+06 8.085e+06 - * x (x) float64 4.663e+05 4.663e+05 ... 4.663e+05 4.663e+05 - * time (time) object 2016-12-19 10:27:29.687763 2016-12-29 12:52:42... - spatial_ref int64 0 -Data variables: - blue (time, y, x) float64 6.611 5.581 0.3996 ... 3.491 5.056 3.368 - green (time, y, x) float64 7.921 66.15 30.1 ... 21.76 27.29 18.41 -Attributes: - coordinates: spatial_ref
Dataset: rio.to_raster()
-[3]:
-
# note how one time slice was selected on export to make the dataset 2D
-rds.isel(time=0).rio.to_raster("planet_scope.tif")
-
[4]:
-
!rio info planet_scope.tif
-
-{"bounds": [466266.0, 8084670.0, 466296.0, 8084700.0], "colorinterp": ["gray", "undefined"], "count": 2, "crs": "EPSG:32722", "descriptions": ["blue", "green"], "driver": "GTiff", "dtype": "float64", "height": 10, "indexes": [1, 2], "interleave": "pixel", "lnglat": [-51.31732641226951, -17.322997474192466], "mask_flags": [["nodata"], ["nodata"]], "nodata": NaN, "res": [3.0, 3.0], "shape": [10, 10], "tiled": false, "transform": [3.0, 0.0, 466266.0, 0.0, -3.0, 8084700.0, 0.0, 0.0, 1.0], "units": [null, null], "width": 10}
-
DataArray: rio.to_raster()
-[5]:
-
# note how selecting one variable allowed for multiple time steps in a single raster
-rds.green.rio.to_raster("planet_scope_green.tif")
-
[6]:
-
!rio info planet_scope_green.tif
-
-{"bounds": [466266.0, 8084670.0, 466296.0, 8084700.0], "colorinterp": ["gray", "undefined"], "count": 2, "crs": "EPSG:32722", "descriptions": ["green", "green"], "driver": "GTiff", "dtype": "float64", "height": 10, "indexes": [1, 2], "interleave": "pixel", "lnglat": [-51.31732641226951, -17.322997474192466], "mask_flags": [["nodata"], ["nodata"]], "nodata": NaN, "res": [3.0, 3.0], "shape": [10, 10], "tiled": false, "transform": [3.0, 0.0, 466266.0, 0.0, -3.0, 8084700.0, 0.0, 0.0, 1.0], "units": [null, null], "width": 10}
-
Useful for reading and writing larger rasters to disk.
-Note: This will increase the time it takes to generate the raster.
-Also see: Reading and Writing with Dask
-[7]:
-
rds = rioxarray.open_rasterio(
- "../../test/test_data/input/PLANET_SCOPE_3D.nc",
- cache=False, # don't keep data loaded in memory. pull from disk every time
-)
-
-rds.green.rio.to_raster(
- "planet_scope_tiled.tif",
- tiled=True, # GDAL: By default striped TIFF files are created. This option can be used to force creation of tiled TIFF files.
- windowed=True, # rioxarray: read & write one window at a time
-)
-
[8]:
-
!rio info planet_scope_tiled.tif
-
-{"blockxsize": 256, "blockysize": 256, "bounds": [466266.0, 8084670.0, 466296.0, 8084700.0], "colorinterp": ["gray", "undefined"], "count": 2, "crs": "EPSG:32722", "descriptions": ["green", "green"], "driver": "GTiff", "dtype": "float64", "height": 10, "indexes": [1, 2], "interleave": "pixel", "lnglat": [-51.31732641226951, -17.322997474192466], "mask_flags": [["nodata"], ["nodata"]], "nodata": NaN, "res": [3.0, 3.0], "shape": [10, 10], "tiled": true, "transform": [3.0, 0.0, 466266.0, 0.0, -3.0, 8084700.0, 0.0, 0.0, 1.0], "units": [null, null], "width": 10}
-
[1]:
-
import multiprocessing
-# Linux/OSX:
-import multiprocessing.popen_spawn_posix
-# Windows:
-# import multiprocessing.popen_spawn_win32
-import threading
-
-from dask.distributed import Client, LocalCluster, Lock
-
-import rioxarray
-
Tips for using dask locks: - Be careful about what lock you use for your process. It is required to have a lock for each worker, so the more fine-grained the better. - The reading and writing processes need the same type of lock. They don’t have to share the same lock, but they do nead a lock of the same type.
-See docs for:
-DataArray: rio.to_raster()
Dataset: rio.to_raster()
Note: Without a lock provided, to_raster
does not use dask to write to disk.
[2]:
-
xds = rioxarray.open_rasterio(
- "../../test/test_data/compare/small_dem_3m_merged.tif",
- chunks=True,
-)
-xds.rio.to_raster("simple_write.tif", tiled=True)
-
[3]:
-
xds = rioxarray.open_rasterio(
- "../../test/test_data/compare/small_dem_3m_merged.tif",
- chunks=True,
- lock=False,
- # lock=threading.Lock(), # when too many file handles open
-)
-xds.rio.to_raster(
- "dask_thread.tif", tiled=True, lock=threading.Lock(),
-)
-
[4]:
-
with LocalCluster() as cluster, Client(cluster) as client:
- xds = rioxarray.open_rasterio(
- "../../test/test_data/compare/small_dem_3m_merged.tif",
- chunks=True,
- lock=False,
- # lock=Lock("rio-read", client=client), # when too many file handles open
- )
- xds.rio.to_raster(
- "dask_multiworker.tif",
- tiled=True,
- lock=Lock("rio", client=client),
- )
-
This page contains links to a collection of examples of how to use rioxarray.
-[1]:
-
import rioxarray # for the extension to load
-import xarray
-
-%matplotlib inline
-
[2]:
-
xds = xarray.open_dataarray("MODIS_ARRAY.nc")
-
[3]:
-
xds
-
[3]:
-
-<xarray.DataArray (y: 200, x: 200)>
-array([[ nan, nan, nan, ..., 656., 656., 554.],
- [ nan, nan, nan, ..., 694., 694., 642.],
- [ nan, nan, nan, ..., 456., 575., 642.],
- ...,
- [993., 817., 817., ..., 471., 479., 498.],
- [893., 893., 816., ..., 479., 479., 469.],
- [816., 816., 832., ..., 515., 469., 485.]], dtype=float32)
-Coordinates:
- * y (y) float64 5.05e+06 5.05e+06 5.05e+06 ... 5.004e+06 5.004e+06
- * x (x) float64 -7.274e+06 -7.274e+06 ... -7.228e+06 -7.228e+06
-Attributes:
- crs: +a=6371007.181 +b=6371007.181 +lon_0=0 +no_defs +proj=sinu +u...
- res: [231.65635826 231.65635826]
- is_tiled: 0
- nodata: -28672.0
- transform: [ 2.31656358e+02 0.00000000e+00 -7.27400965e+06 0.00000000e...
-
[4]:
-
xds.isel(x=slice(0, 20), y=slice(0, 20)).plot()
-
[4]:
-
-<matplotlib.collections.QuadMesh at 0x7f3bf197f978>
-
API Reference:
-DataArray: rio.interpolate_na()
Dataset: rio.interpolate_na()
[5]:
-
filled = xds.rio.interpolate_na()
-
[6]:
-
filled
-
[6]:
-
-<xarray.DataArray (y: 200, x: 200)>
-array([[673., 558., 687., ..., 656., 656., 554.],
- [673., 558., 558., ..., 694., 694., 642.],
- [673., 558., 558., ..., 456., 575., 642.],
- ...,
- [993., 817., 817., ..., 471., 479., 498.],
- [893., 893., 816., ..., 479., 479., 469.],
- [816., 816., 832., ..., 515., 469., 485.]], dtype=float32)
-Coordinates:
- * y (y) float64 5.05e+06 5.05e+06 5.05e+06 ... 5.004e+06 5.004e+06
- * x (x) float64 -7.274e+06 -7.274e+06 ... -7.228e+06 -7.228e+06
- spatial_ref int64 0
-Attributes:
- transform: (231.6563582639561, 0.0, -7274009.649486291, 0.0, -231.656...
- _FillValue: -28672.0
- grid_mapping: spatial_ref
-
[7]:
-
filled.isel(x=slice(0, 20), y=slice(0, 20)).plot()
-
[7]:
-
-<matplotlib.collections.QuadMesh at 0x7f3bf1865860>
-
[1]:
-
import rioxarray # for the extension to load
-import xarray
-from rioxarray.merge import merge_arrays
-# Note: You can merge datasets with the merge_datasets method
-
-%matplotlib inline
-
API reference:
- -[2]:
-
dem_test = "../../test/test_data/input/MODIS_ARRAY.nc"
-rds = rioxarray.open_rasterio(dem_test)
-arrays = [
- rds.isel(x=slice(100), y=slice(100)),
- rds.isel(x=slice(100, 200), y=slice(100, 200)),
- rds.isel(x=slice(100), y=slice(100, 200)),
- rds.isel(x=slice(100, 200), y=slice(100)),
-]
-merged = merge_arrays(arrays)
-
[3]:
-
rds.where(rds!=rds.rio.nodata).plot();
-
[4]:
-
merged.where(merged!=merged.rio.nodata).plot()
-
[4]:
-
-<matplotlib.collections.QuadMesh at 0x7f84f86f13c8>
-
[1]:
-
import rioxarray # for the extension to load
-import xarray
-
-%matplotlib inline
-
[2]:
-
xds = xarray.open_dataarray("../../test/test_data/input/MODIS_ARRAY.nc")
-
[3]:
-
xds
-
[3]:
-
-<xarray.DataArray (y: 200, x: 200)>
-array([[ nan, nan, nan, ..., 656., 656., 554.],
- [ nan, nan, nan, ..., 694., 694., 642.],
- [ nan, nan, nan, ..., 456., 575., 642.],
- ...,
- [993., 817., 817., ..., 471., 479., 498.],
- [893., 893., 816., ..., 479., 479., 469.],
- [816., 816., 832., ..., 515., 469., 485.]], dtype=float32)
-Coordinates:
- * y (y) float64 5.05e+06 5.05e+06 5.05e+06 ... 5.004e+06 5.004e+06
- * x (x) float64 -7.274e+06 -7.274e+06 ... -7.228e+06 -7.228e+06
[5]:
-
xds.plot()
-
[5]:
-
-<matplotlib.collections.QuadMesh at 0x7fcb90621438>
-
See docs for rio.pad_box
:
[6]:
-
xdsc = xds.rio.pad_box(
- minx=-7.3e+06,
- miny=4.99e+06,
- maxx=-7.2e+06,
- maxy=5.06e+06,
-)
-
[9]:
-
xdsc.values[0, 0]
-
[9]:
-
-nan
-
[8]:
-
xdsc.plot()
-
[8]:
-
-<matplotlib.collections.QuadMesh at 0x7fcb8ed54ba8>
-
Cloud Optimized Geotiffs (COGs) can be internally chunked, which makes it possible to read them in parallel from multiple threads. However, the libraries rioxarray
builds on, rasterio
and GDAL
, require some care to be used safely from multiple threads within a single process. By default, rioxarray.open_rasterio will acquire a per-process lock when reading a chunk of a COG.
If you’re using rioxarray
with Dask through the chunks
keyword, you can also specify the lock=False
keyword to ensure that reading and operating on your data happen in parallel.
Note: Also see Reading and Writing with Dask
-Dask has several schedulers which run computations in parallel. Which scheduler is best depends on a variety of factors, including whether your computation holds Python’s Global Interpreter Lock, how much data needs to be moved around, and whether you need more than one machine’s computational power. This section about read-locks only applies if you have more than one thread in a process. This will happen with Dask’s local threaded scheduler and its distributed scheduler when configured to use more than one thread per worker.
-By default, xarray
objects will use the local threaded
scheduler.
To read a COG without any locks, you’d specify lock=False
. This tells rioxarray
to open a new rasterio.DatasetReader
in each thread, rather than trying to share one amongst multiple threads.
[1]:
-
import rioxarray
-
-url = (
- "https://naipeuwest.blob.core.windows.net/naip/v002/md/2013/md_100cm_2013/"
- "39076/m_3907617_ne_18_1_20130924.tif"
-)
-
[2]:
-
ds = rioxarray.open_rasterio(url, lock=False, chunks=(4, "auto", -1))
-%time _ = ds.mean().compute()
-
-CPU times: user 2.4 s, sys: 361 ms, total: 2.76 s
-Wall time: 3.32 s
-
Note: these timings are from a VM in the same Azure data center that’s hosting the COG. Running this locally will give different times.
-For maximum read performance, the chunking pattern you request should align with the internal chunking of the COG. Typically this means reading the data in a “row major” format: your chunks should be as wide as possible along the columns. We did that above with the chunks of (4, "auto", -1)
. The -1
says “include all the columns”, and the "auto"
will make the chunking along the rows as large as possible while staying within a reasonable limit (specified in
-dask.config.get("array.chunk-size")
).
If we flip that and instead read as much of the rows as possible, we’ll see slower performance.
-[2]:
-
ds = rioxarray.open_rasterio(url, lock=False, chunks=(1, -1, "auto"))
-%time _ = ds.mean().compute()
-
-CPU times: user 8.58 s, sys: 1.08 s, total: 9.66 s
-Wall time: 11.2 s
-
That said, reading is typically just the first step in a larger computation. You’d want to consider what chunking is best for your whole computation. See https://docs.dask.org/en/latest/array-chunks.html for more on choosing chunks.
-Specifying lock=False
will disable some internal caching done by xarray or rasterio. For example, the first and second reads here are roughly the same, since nothing is cached.
[2]:
-
ds = rioxarray.open_rasterio(url, lock=False, chunks=(4, "auto", -1))
-%time _ = ds.mean().compute()
-
-CPU times: user 2.49 s, sys: 392 ms, total: 2.88 s
-Wall time: 3.25 s
-
[3]:
-
%time _ = ds.mean().compute()
-
-CPU times: user 2.48 s, sys: 292 ms, total: 2.78 s
-Wall time: 2.97 s
-
By default and when a lock is passed in, the initial read is slower (since some threads are waiting around for a lock).
-[2]:
-
ds = rioxarray.open_rasterio(url, chunks=(4, "auto", -1)) # use the default locking
-%time _ = ds.mean().compute()
-
-CPU times: user 2.15 s, sys: 284 ms, total: 2.44 s
-Wall time: 5.03 s
-
But thanks to caching, subsequent reads are much faster.
-[3]:
-
%time _ = ds.mean().compute()
-
-CPU times: user 223 ms, sys: 64.9 ms, total: 288 ms
-Wall time: 200 ms
-
If you’re repeatedly reading subsets of the data, use the default lock or lock=some_lock_object to benefit from the caching.
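For example, a minimal sketch of passing an explicit lock object (assuming dask is installed; url is the COG opened above, and cog_lock is a name chosen here for illustration):

from dask.utils import SerializableLock

# share one lock across all read tasks so chunk reads are serialized per process,
# keeping the benefit of the file-handle caching described above
cog_lock = SerializableLock()
ds = rioxarray.open_rasterio(url, lock=cog_lock, chunks=(4, "auto", -1))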
To re-project with dask, see odc-geo & pyresample.
-[1]:
-
import rioxarray # for the extension to load
-import xarray
-import rasterio
-
-%matplotlib inline
-
[2]:
-
xds = xarray.open_dataset("../../test/test_data/input/PLANET_SCOPE_3D.nc", decode_coords="all")
-
[3]:
-
xds
-
[3]:
-
<xarray.Dataset> -Dimensions: (time: 2, x: 10, y: 10) -Coordinates: - spatial_ref int64 0 - * x (x) float64 4.663e+05 4.663e+05 ... 4.663e+05 4.663e+05 - * time (time) datetime64[ns] 2016-12-19T10:27:29.687763 2016-12-29T... - * y (y) float64 8.085e+06 8.085e+06 ... 8.085e+06 8.085e+06 -Data variables: - blue (time, y, x) float64 6.611 5.581 0.3996 ... 3.491 5.056 3.368 - green (time, y, x) float64 7.921 66.15 30.1 ... 21.76 27.29 18.41
[4]:
-
xds.green.where(xds.green!=xds.green.rio.nodata).isel(time=1).plot()
-
[4]:
-
-<matplotlib.collections.QuadMesh at 0x7f8f13620880>
-
API Reference:
-DataArray: rio.reproject()
Dataset: rio.reproject()
[5]:
-
xds_lonlat = xds.rio.reproject("EPSG:4326")
-
[6]:
-
xds_lonlat
-
[6]:
-
<xarray.Dataset> -Dimensions: (time: 2, x: 10, y: 10) -Coordinates: - * x (x) float64 -51.32 -51.32 -51.32 ... -51.32 -51.32 -51.32 - * y (y) float64 -17.32 -17.32 -17.32 ... -17.32 -17.32 -17.32 - * time (time) datetime64[ns] 2016-12-19T10:27:29.687763 2016-12-29T... - spatial_ref int64 0 -Data variables: - blue (time, y, x) float64 6.611 5.581 0.3996 ... 3.491 5.056 3.368 - green (time, y, x) float64 7.921 66.15 30.1 ... 21.76 27.29 18.41
[7]:
-
xds_lonlat.green.where(xds_lonlat.green!=xds_lonlat.green.rio.nodata).isel(time=1).plot()
-
[7]:
-
-<matplotlib.collections.QuadMesh at 0x7f8f134b63a0>
-
API Reference:
- -[8]:
-
xds_utm = xds.rio.reproject(xds.rio.estimate_utm_crs())
-xds_utm.rio.crs
-
[8]:
-
-CRS.from_epsg(32722)
-
Using WarpedVRT enables re-projection from disk and reduces the amount of memory required for the re-projection.
-[9]:
-
url = (
- "https://storage.googleapis.com/"
- "gcp-public-data-landsat/LC08/01/047/027/"
- "LC08_L1TP_047027_20130421_20170310_01_T1/"
- "LC08_L1TP_047027_20130421_20170310_01_T1_B4.TIF"
-)
-env = rasterio.Env(
- GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR",
- CPL_VSIL_CURL_USE_HEAD=False,
- CPL_VSIL_CURL_ALLOWED_EXTENSIONS="TIF",
-)
-with env:
- with rasterio.open(url) as src:
- with rasterio.vrt.WarpedVRT(src, crs="EPSG:4326") as vrt:
- rds = rioxarray.open_rasterio(vrt)
- rds.sel(band=1).plot.imshow()
-
rio.reproject_match
will reproject to match the resolution, projection, and region of another raster.
This is useful for raster calculations and stacking rasters.
-[1]:
-
import rioxarray # for the extension to load
-import xarray
-
-import matplotlib.pyplot as plt
-
-%matplotlib inline
-
[2]:
-
def print_raster(raster):
- print(
- f"shape: {raster.rio.shape}\n"
- f"resolution: {raster.rio.resolution()}\n"
- f"bounds: {raster.rio.bounds()}\n"
- f"sum: {raster.sum().item()}\n"
- f"CRS: {raster.rio.crs}\n"
- )
-
[3]:
-
xds = xarray.open_dataarray("../../test/test_data/input/MODIS_ARRAY.nc")
-xds_match = xarray.open_dataarray("../../test/test_data/input/MODIS_ARRAY_MATCH.nc")
-
[4]:
-
fig, axes = plt.subplots(ncols=2, figsize=(12,4))
-xds.plot(ax=axes[0])
-xds_match.plot(ax=axes[1])
-plt.draw()
-
[5]:
-
print("Original Raster:\n----------------\n")
-print_raster(xds)
-print("Raster to Match:\n----------------\n")
-print_raster(xds_match)
-
-Original Raster:
-----------------
-
-shape: (200, 200)
-resolution: (231.6563582639561, -231.65635826375018)
-bounds: (-7274009.649486291, 5003777.3385, -7227678.3778335, 5050108.61015275)
-sum: 23209796.0
-CRS: PROJCS["unknown",GEOGCS["unknown",DATUM["unknown",SPHEROID["unknown",6371007.181,0]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]]],PROJECTION["Sinusoidal"],PARAMETER["longitude_of_center",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]
-
-Raster to Match:
-----------------
-
-shape: (100, 150)
-resolution: (386.65122672362685, -386.65122672362685)
-bounds: (485124.8828918401, 4990535.635952473, 543122.5669003841, 5029200.758624835)
-sum: 4903477.0
-CRS: EPSG:32615
-
-
API Reference:
-DataArray: rio.reproject_match()
Dataset: rio.reproject_match()
[6]:
-
xds_repr_match = xds.rio.reproject_match(xds_match)
-
[7]:
-
print("Reprojected Raster:\n-------------------\n")
-print_raster(xds_repr_match)
-print("Raster to Match:\n----------------\n")
-print_raster(xds_match)
-
-Reprojected Raster:
--------------------
-
-shape: (100, 150)
-resolution: (386.6512267236268, -386.6512267236231)
-bounds: (485124.8828918401, 4990535.635952473, 543122.5669003841, 5029200.758624835)
-sum: 4930593.0
-CRS: EPSG:32615
-
-Raster to Match:
-----------------
-
-shape: (100, 150)
-resolution: (386.65122672362685, -386.65122672362685)
-bounds: (485124.8828918401, 4990535.635952473, 543122.5669003841, 5029200.758624835)
-sum: 4903477.0
-CRS: EPSG:32615
-
-
Now that the rasters have the same projection, resolution, and extents, you can do raster calculations.
-It is recommended to use assign_coords
to make the coordinates exactly the same, because tiny floating-point precision differences in the coordinate values can otherwise cause misalignment (issue 298).
[8]:
-
xds_repr_match = xds_repr_match.assign_coords({
- "x": xds_match.x,
- "y": xds_match.y,
-})
-xds_sum = xds_repr_match + xds_match
-
[9]:
-
print("Sum Raster:\n-----------\n")
-print_raster(xds_sum)
-
-Sum Raster:
------------
-
-shape: (100, 150)
-resolution: (386.6512267236268, -386.6512267236231)
-bounds: (485124.8828918401, 4990535.635952473, 543122.5669003841, 5029200.758624835)
-sum: 9814687.0
-CRS: EPSG:32615
-
-
[10]:
-
fig, axes = plt.subplots(ncols=3, figsize=(16,4))
-
-xds_repr_match.plot(ax=axes[0])
-xds_match.plot(ax=axes[1])
-xds_sum.plot(ax=axes[2])
-
-plt.draw()
-
This example demonstrates how to reproduce rasterio
’s resampling example here.
[1]:
-
from rasterio.enums import Resampling
-
-import rioxarray
-
-%matplotlib inline
-
See docs for rioxarray.open_rasterio
-Notes:
-masked=True
will convert from integer to float64
and fill with NaN
. If this behavior is not desired, you can skip this.
[2]:
-
xds = rioxarray.open_rasterio(
- "../../test/test_data/compare/small_dem_3m_merged.tif",
- masked=True,
-)
-
API Reference for rio.reproject
:
[3]:
-
upscale_factor = 2
-new_width = xds.rio.width * upscale_factor
-new_height = xds.rio.height * upscale_factor
-
-xds_upsampled = xds.rio.reproject(
- xds.rio.crs,
- shape=(new_height, new_width),
- resampling=Resampling.bilinear,
-)
-
[4]:
-
xds.shape
-
[4]:
-
-(1, 245, 574)
-
[5]:
-
xds_upsampled.shape
-
[5]:
-
-(1, 490, 1148)
-
[6]:
-
xds.rio.resolution()
-
[6]:
-
-(3.0, -3.0)
-
[7]:
-
xds_upsampled.rio.resolution()
-
[7]:
-
-(1.5, -1.5)
-
The rio.transform_bounds() method allows you to correctly estimate the bounds of your raster in a different CRS without needing to re-project it. Simply transforming the corner coordinates of the bounds is often incorrect due to nonlinear transformations.
-[1]:
-
import pyproj
-import rioxarray # for the extension to load
-import xarray
-from shapely.geometry import box
-
-import matplotlib.pyplot as plt
-
-%matplotlib inline
-
[2]:
-
xds = xarray.open_dataarray("../../test/test_data/input/MODIS_ARRAY.nc")
-transformer = pyproj.Transformer.from_crs(xds.rio.crs, "EPSG:4326", always_xy=True)
-
[3]:
-
ax = plt.subplot()
-xds.plot(ax=ax)
-ax.plot(
- *box(*xds.rio.bounds()).exterior.xy,
- color="red",
- linewidth=3,
-)
-
[3]:
-
-[<matplotlib.lines.Line2D at 0x7f324f4456c0>]
-
The rio.transform_bounds() method allows you to safely convert a bounding box into another projection taking into account the effects of nonlinear transformations.
-[4]:
-
reprojected_raster = xds.rio.reproject("EPSG:4326")
-
This is the benchmark. However, this method is computationally inefficient. So, if you don’t need to re-project, rio.transform_bounds() is a more efficient method.
-[5]:
-
reprojected_raster_box = box(*reprojected_raster.rio.bounds())
-
[6]:
-
ax = plt.subplot()
-reprojected_raster.plot(ax=ax)
-ax.plot(
- *reprojected_raster_box.exterior.xy,
- color="red",
- linewidth=3,
-)
-
[6]:
-
-[<matplotlib.lines.Line2D at 0x7f324735e4d0>]
-
Directly transforming the corners is an incorrect method to calculate the new boundary.
-[7]:
-
transform_box = box(*transformer.transform(*xds.rio.bounds()))
-
[8]:
-
ax = plt.subplot()
-reprojected_raster.plot(ax=ax)
-ax.plot(
- *transform_box.exterior.xy,
- color="red",
- linewidth=3,
-)
-
[8]:
-
-[<matplotlib.lines.Line2D at 0x7f3245a3bee0>]
-
rio.transform_bounds() is both computationally efficient and a correct method for calculating the bounds of your raster in the new projection.
-[9]:
-
transform_bounds_box = box(*xds.rio.transform_bounds("EPSG:4326"))
-
[10]:
-
ax = plt.subplot()
-reprojected_raster.plot(ax=ax)
-ax.plot(
- *transform_bounds_box.exterior.xy,
- color="red",
- linewidth=3,
-)
-
[10]:
-
-[<matplotlib.lines.Line2D at 0x7f32459316c0>]
-
As seen below, this is equivalent to the Transformer.transform_bounds method in pyproj:
-[11]:
-
pyproj_transform_bounds_box = box(*transformer.transform_bounds(*xds.rio.bounds()))
-
[12]:
-
ax = plt.subplot()
-reprojected_raster.plot(ax=ax)
-ax.plot(
- *transform_bounds_box.exterior.xy,
- color="red",
- linewidth=3,
-)
-
[12]:
-
-[<matplotlib.lines.Line2D at 0x7f324580d120>]
-
xarray “… is particularly tailored to working with netCDF files, which were the source of xarray’s data model…” (http://xarray.pydata.org).
-For netCDF files, the GIS community uses CF conventions (http://cfconventions.org/).
-Additionally, GDAL also supports these attributes:
-spatial_ref (Well Known Text)
GeoTransform (GeoTransform array)
References:
-GDAL: https://gdal.org/drivers/raster/netcdf.html#georeference
pyproj: https://pyproj4.github.io/pyproj/stable/build_crs_cf.html
Operations on xarray objects can cause data loss. Due to this, rioxarray writes and expects the spatial reference information to exist in the coordinates.
-If you have opened a dataset and the Coordinate Reference System (CRS) can be determined, you can access it via the rio.crs
accessor.
Look in attributes (attrs
) of your data array for the grid_mapping
coordinate name. Inside the grid_mapping
coordinate first look for spatial_ref
then crs_wkt
and lastly the CF grid mapping attributes. This is in line with the Climate and Forecast (CF) conventions for storing the CRS as well as GDAL netCDF conventions.
Look in the crs
attribute and load in the CRS from there. This is for backwards compatibility with xarray.open_rasterio
, which is deprecated since version 0.20.0. We recommend using rioxarray.open_rasterio
instead.
The value for the crs
is anything accepted by rasterio.crs.CRS.from_user_input()
If the CRS is not found using the search methods above, it also searches the data_vars
and uses the first valid CRS found.
If you use one of xarray’s open methods such as xarray.open_dataset
to load netCDF files with the default engine, it is recommended to use decode_coords="all"
. This will load the grid mapping variable into coordinates for compatibility with rioxarray.
[1]:
-
import rioxarray # activate the rio accessor
-import xarray
-from affine import Affine
-
[2]:
-
rds = xarray.open_dataset("../../test/test_data/input/PLANET_SCOPE_3D.nc", decode_coords="all")
-
[3]:
-
rds.green.attrs
-
[3]:
-
-{'units': 'DN', 'nodata': 0.0}
-
[4]:
-
rds.green.spatial_ref
-
[4]:
-
<xarray.DataArray 'spatial_ref' ()> -array(0) -Coordinates: - spatial_ref int64 0 -Attributes: - spatial_ref: PROJCS["WGS 84 / UTM zone 22S",GEOGCS["WGS 84",DATUM["WGS_1...
[5]:
-
rds.green.rio.crs
-
[5]:
-
-CRS.from_epsg(32722)
-
Use the rio.write_crs
method to set the CRS on your xarray.Dataset
or xarray.DataArray
. This modifies the xarray.Dataset
or xarray.DataArray
and sets the CRS in a CF compliant manner.
[6]:
-
xda = xarray.DataArray(1)
-xda.rio.write_crs(4326, inplace=True)
-xda.spatial_ref
-
[6]:
-
<xarray.DataArray 'spatial_ref' ()> -array(0) -Coordinates: - spatial_ref int64 0 -Attributes: - crs_wkt: GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["... - semi_major_axis: 6378137.0 - semi_minor_axis: 6356752.314245179 - inverse_flattening: 298.257223563 - reference_ellipsoid_name: WGS 84 - longitude_of_prime_meridian: 0.0 - prime_meridian_name: Greenwich - geographic_crs_name: WGS 84 - grid_mapping_name: latitude_longitude - spatial_ref: GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["...
[7]:
-
xda.rio.crs
-
[7]:
-
-CRS.from_epsg(4326)
-
Only 1-dimensional X and Y dimensions are supported.
-The expected X/Y dimension names searched for in the coords
are:
x | y
longitude | latitude
Coordinates (coords
) with the CF attributes in attrs
:
axis: X | Y
standard_name: longitude | latitude or projection_x_coordinate | projection_y_coordinate
Option 1: Write the CF attributes for non-standard dimension names
-If you don’t want to rename your dimensions/coordinates, you can write the CF attributes so the coordinates can be found.
- -[ ]:
-
rds.rio.write_crs(
    4326,
    inplace=True,
).rio.set_spatial_dims(
    x_dim="lon",
    y_dim="lat",
    inplace=True,
).rio.write_coordinate_system(inplace=True)
-
Option 2: Rename your coordinates
- -[ ]:
-
rds = rds.rename(lon="longitude", lat="latitude")
-
The transform can be calculated from the coordinates of your data. This method is useful if your netCDF file does not have coordinates present. Use the rio.write_transform
method to set the transform on your xarray.Dataset
or xarray.DataArray
.
[8]:
-
transform = Affine(3.0, 0.0, 466266.0, 0.0, -3.0, 8084700.0)
-xda.rio.write_transform(transform, inplace=True)
-xda.spatial_ref.GeoTransform
-
[8]:
-
-'466266.0 3.0 0.0 8084700.0 0.0 -3.0'
-
[9]:
-
xda.rio.transform()
-
[9]:
-
-Affine(3.0, 0.0, 466266.0,
- 0.0, -3.0, 8084700.0)
-
Welcome! This page aims to help you gain a foundational understanding of rioxarray.
-rioxarray extends xarray -with the rio accessor. The rio accessor is activated by importing rioxarray like so:
-import rioxarray
-
You can learn how to clip, merge, and reproject rasters in the Usage Examples -section of the documentation. Need to export to a raster (GeoTiff)? There is an example for -that as well.
-Since rioxarray is an extension of xarray, you can load in files using the standard
-xarray open methods. If you use one of xarray’s open methods such as xarray.open_dataset
-to load netCDF files with the default engine, it is recommended to use decode_coords=”all”.
-This will load the grid mapping variable into coordinates for compatibility with rioxarray.
import xarray
-
-xds = xarray.open_dataset("file.nc", decode_coords="all")
-
rioxarray 0.4+ enables passing engine=”rasterio” to xarray.open_dataset
-and xarray.open_mfdataset
for xarray 0.18+. This uses
-rioxarray.open_rasterio()
as the backend and always returns an xarray.Dataset
.
import xarray
-
-xds = xarray.open_dataset("my.tif", engine="rasterio")
-
You can also use rioxarray.open_rasterio()
. The object returned depends on your input file type.
import rioxarray
-
-xds = rioxarray.open_rasterio("my.tif")
-
Why use rioxarray.open_rasterio()
instead of xarray.open_rasterio?
It supports multidimensional datasets such as netCDF.
It stores the CRS as a WKT, which is the recommended format (PROJ FAQ).
It loads in the CRS, transform, and nodata metadata in standard CF & GDAL locations.
It supports masking and scaling data with the masked and mask_and_scale kwargs.
It adds the coordinate axis CF metadata.
It loads raster metadata into the attributes.
xarray.open_rasterio is deprecated (since v0.20.0)
Sometimes, you can lose important information from your dataset when performing operations. You will likely want to keep track of the attributes, nodata
, and CRS
.
API Reference:
-Note that write_transform
is only needed if you are not saving the x,y coordinates. It allows GDAL to read the transform without needing the original coordinates, and it is useful if you read in the file with parse_coordinates=False
.
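A minimal sketch of that case, assuming a GeoTIFF at a placeholder path "my.tif" opened without parsing coordinates:

import rioxarray

rds = rioxarray.open_rasterio("my.tif", parse_coordinates=False)
# store the transform in the GDAL GeoTransform attribute so the output
# remains georeferenced even though no x/y coordinates are saved
rds.rio.write_transform(rds.rio.transform(), inplace=True)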
[1]:
-
import rioxarray
-import xarray
-
See docs for rioxarray.open_rasterio
-[2]:
-
rds = rioxarray.open_rasterio(
- "../../test/test_data/input/PLANET_SCOPE_3D.nc",
- variable=["green"],
- mask_and_scale=True,
-)
-
Notice the original data:
-[3]:
-
rds.green.attrs, rds.green.encoding, rds.green.rio.crs, rds.green.rio.nodata
-
[3]:
-
-({'nodata': 0, 'units': ('DN', 'DN')},
- {'dtype': 'float64',
- 'grid_mapping': 'spatial_ref',
- 'scale_factor': 1.0,
- 'add_offset': 0.0,
- '_FillValue': nan,
- 'source': 'netcdf:../../test/test_data/input/PLANET_SCOPE_3D.nc:green'},
- CRS.from_epsg(32722),
- nan)
-
Notice how information is lost in the operation:
-[4]:
-
new_ds = rds.green + rds.green
-new_ds.attrs, new_ds.encoding, new_ds.rio.crs, new_ds.rio.nodata
-
[4]:
-
-({}, {}, CRS.from_epsg(32722), None)
-
To preserve attributes, xarray has set_options with keep_attrs=True
. However, it does not preserve the encoding.
[5]:
-
with xarray.set_options(keep_attrs=True):
- new_ds = rds.green + rds.green
-new_ds.attrs, new_ds.encoding, new_ds.rio.crs, new_ds.rio.nodata
-
[5]:
-
-({'nodata': 0, 'units': ('DN', 'DN')}, {}, CRS.from_epsg(32722), 0.0)
-
Another solution is to save the original attributes and then copy them over once the operation is complete:
-[6]:
-
new_ds = rds.green + rds.green
-new_ds.rio.write_crs(rds.green.rio.crs, inplace=True)
-new_ds.rio.update_attrs(rds.green.attrs, inplace=True)
-new_ds.rio.update_encoding(rds.green.encoding, inplace=True)
-new_ds.attrs, new_ds.encoding, new_ds.rio.crs, new_ds.rio.nodata
-
[6]:
-
-({'nodata': 0, 'units': ('DN', 'DN')},
- {'grid_mapping': 'spatial_ref',
- 'dtype': 'float64',
- 'scale_factor': 1.0,
- 'add_offset': 0.0,
- '_FillValue': nan,
- 'source': 'netcdf:../../test/test_data/input/PLANET_SCOPE_3D.nc:green'},
- CRS.from_epsg(32722),
- nan)
-
[7]:
-
new_ds.rio.to_raster("combination_keep_attrs.tif")
-
[8]:
-
!rio info combination_keep_attrs.tif
-
-{"bounds": [466266.0, 8084670.0, 466296.0, 8084700.0], "colorinterp": ["gray", "undefined"], "count": 2, "crs": "EPSG:32722", "descriptions": ["green", "green"], "driver": "GTiff", "dtype": "float64", "height": 10, "indexes": [1, 2], "interleave": "pixel", "lnglat": [-51.31732641226951, -17.322997474192466], "mask_flags": [["nodata"], ["nodata"]], "nodata": NaN, "res": [3.0, 3.0], "shape": [10, 10], "tiled": false, "transform": [3.0, 0.0, 466266.0, 0.0, -3.0, 8084700.0, 0.0, 0.0, 1.0], "units": [null, null], "width": 10}
-
If you have opened a dataset and the nodata value can be determined, you can access it via the rio.nodata
or rio.encoded_nodata
accessors.
If your dataset’s nodata value cannot be determined, you can use the rio.write_nodata
method.
Check if DataArray values are masked. If they are masked, return NaN
. If the DataArray is masked, the original nodata value can be retrieved from rio.encoded_nodata
.
Look in attributes (attrs
) of your data array for the _FillValue
then missing_value
then fill_value
and finally nodata
.
Look in the nodatavals
attribute. This is for backwards compatibility with xarray.open_rasterio
. We recommend using rioxarray.open_rasterio
instead.
[1]:
-
import rioxarray
-import xarray
-
-file_path = "../../test/test_data/input/tmmx_20190121.nc"
-
In this case, the nodata value is in the attributes.
-[2]:
-
xds = xarray.open_dataset(file_path, mask_and_scale=False) # performs mask_and_scale by default
-rds = rioxarray.open_rasterio(file_path)
-
[3]:
-
print("nodata:")
-print(f"- xarray.open_dataset: {xds.air_temperature.rio.nodata}")
-print(f"- rioxarray.open_rasterio: {rds.air_temperature.rio.nodata}")
-print("\nencoded_nodata:")
-print(f"- xarray.open_dataset: {xds.air_temperature.rio.encoded_nodata}")
-print(f"- rioxarray.open_rasterio: {rds.air_temperature.rio.encoded_nodata}")
-
-nodata:
-- xarray.open_dataset: 32767
-- rioxarray.open_rasterio: 32767
-
-encoded_nodata:
-- xarray.open_dataset: None
-- rioxarray.open_rasterio: None
-
[4]:
-
print("attributes:")
-print(f"\n- xarray.open_dataset:\n {xds.air_temperature.attrs}")
-print(f"\n- rioxarray.open_rasterio:\n {rds.air_temperature.attrs}")
-
-attributes:
-
-- xarray.open_dataset:
- {'_FillValue': 32767, 'units': 'K', 'description': 'Daily Maximum Temperature', 'long_name': 'tmmx', 'standard_name': 'tmmx', 'missing_value': 32767, 'dimensions': 'lon lat time', 'grid_mapping': 'crs', 'coordinate_system': 'WGS84,EPSG:4326', 'scale_factor': 0.1, 'add_offset': 220.0, '_Unsigned': 'true'}
-
-- rioxarray.open_rasterio:
- {'add_offset': 220.0, 'coordinates': 'day', 'coordinate_system': 'WGS84,EPSG:4326', 'description': 'Daily Maximum Temperature', 'dimensions': 'lon lat time', 'long_name': 'tmmx', 'missing_value': 32767, 'scale_factor': 0.1, 'standard_name': 'tmmx', 'units': 'K', '_FillValue': 32767.0, '_Unsigned': 'true'}
-
When the dataset is opened with mask_and_scale=True
with rioxarray.open_rasterio
or xarray.open_dataset
, the nodata metadata is written to the encoding attribute. Then, when the dataset is written using to_netcdf
or rio.to_raster
the data is decoded and it writes the original nodata value to the raster.
When this happens, rio.nodata
returns numpy.nan
and rio.encoded_nodata
contains the original value.
[5]:
-
xds = xarray.open_dataset(file_path) # performs mask_and_scale by default
-rds = rioxarray.open_rasterio(file_path, mask_and_scale=True)
-
-/home/snowal/miniconda/envs/midas/lib/python3.10/site-packages/rioxarray/_io.py:618: SerializationWarning: variable 'air_temperature' has _Unsigned attribute but is not of integer type. Ignoring attribute.
- rioda = open_rasterio(
-
[6]:
-
print("nodata:")
-print(f"- xarray.open_dataset: {xds.air_temperature.rio.nodata}")
-print(f"- rioxarray.open_rasterio: {rds.air_temperature.rio.nodata}")
-print("\nencoded_nodata:")
-print(f"- xarray.open_dataset: {xds.air_temperature.rio.encoded_nodata}")
-print(f"- rioxarray.open_rasterio: {rds.air_temperature.rio.encoded_nodata}")
-
-nodata:
-- xarray.open_dataset: nan
-- rioxarray.open_rasterio: nan
-
-encoded_nodata:
-- xarray.open_dataset: 32767.0
-- rioxarray.open_rasterio: 32767.0
-
[7]:
-
print("attributes:")
-print(f"\n- xarray.open_dataset:\n {xds.air_temperature.attrs}")
-print(f"\n- rioxarray.open_rasterio:\n {rds.air_temperature.attrs}")
-
-attributes:
-
-- xarray.open_dataset:
- {'units': 'K', 'description': 'Daily Maximum Temperature', 'long_name': 'tmmx', 'standard_name': 'tmmx', 'dimensions': 'lon lat time', 'grid_mapping': 'crs', 'coordinate_system': 'WGS84,EPSG:4326'}
-
-- rioxarray.open_rasterio:
- {'coordinates': 'day', 'coordinate_system': 'WGS84,EPSG:4326', 'description': 'Daily Maximum Temperature', 'dimensions': 'lon lat time', 'long_name': 'tmmx', 'standard_name': 'tmmx', 'units': 'K'}
-
[8]:
-
print("encoding:")
-print(f"\n- xarray.open_dataset:\n {xds.air_temperature.encoding}")
-print(f"\n- rioxarray.open_rasterio:\n {rds.air_temperature.encoding}")
-
-encoding:
-
-- xarray.open_dataset:
- {'zlib': True, 'shuffle': True, 'complevel': 5, 'fletcher32': False, 'contiguous': False, 'chunksizes': (585, 1386), 'source': '/home/snowal/scripts/rioxarray/test/test_data/input/tmmx_20190121.nc', 'original_shape': (585, 1386), 'dtype': dtype('uint16'), '_Unsigned': 'true', 'missing_value': 32767, '_FillValue': 32767, 'scale_factor': 0.1, 'add_offset': 220.0, 'coordinates': 'day'}
-
-- rioxarray.open_rasterio:
- {'_Unsigned': 'true', 'dtype': 'uint16', 'grid_mapping': 'crs', 'scale_factor': 0.1, 'add_offset': 220.0, '_FillValue': 32767.0, 'missing_value': 32767, 'source': 'netcdf:../../test/test_data/input/tmmx_20190121.nc:air_temperature', 'rasterio_dtype': 'uint16'}
-
If you use xarray.where
to mask your data, then you need to ensure that the attributes stored on the DataArray reflect the correct values. rio.write_nodata() can help ensure that the nodata attributes are written correctly.
[9]:
-
xds = xarray.open_dataset(file_path, mask_and_scale=False) # performs mask_and_scale by default
-raster = xds.air_temperature
-raster = raster.where(raster != raster.rio.nodata)
-# nodata does not reflect the data has been masked
-print(f"nodata: {raster.rio.nodata}")
-print(f"encoded_nodata: {raster.rio.encoded_nodata}")
-
-nodata: 32767.0
-encoded_nodata: None
-
[10]:
-
# update nodata value to show the data has been masked
-raster.rio.write_nodata(raster.rio.nodata, encoded=True, inplace=True)
-print(f"nodata: {raster.rio.nodata}")
-print(f"encoded_nodata: {raster.rio.encoded_nodata}")
-
-nodata: nan
-encoded_nodata: 32767.0
-
BUG: Fix setting spatial dims internally during propagation (pull #682)
ENH: Pass on on-disk chunk sizes as preferred chunk sizes to the xarray backend (pull #678)
MNT: add __all__ to top level module (issue #680)
BUG: Fix rioxarray.merge
CRS check (pull #655)
BUG: Remove tags with metadata added by rasterio in rioxarray.open_rasterio()
(issue #666)
DEP: Drop Python 3.8 support (issue #582)
DEP: pin rasterio>=1.2 (pull #642)
BUG: Fix WarpedVRT in rioxarray.open_rasterio()
when band_as_variable=True (issue #644)
BUG: Fix usage of encode_cf_variable in rio.to_raster (pull #652)
DEP: pin numpy>=1.21 (pull #636)
BUG: Handle data type error in rio.reproject (issue #618)
BUG:dataset: Fix writing tags for bands (issue #615)
BUG:dataset: prevent overwriting long_name attribute (pull #616)
BUG: Fix closing files manually (pull #607)
BUG: Add GDAL 3.6 driver auto-select fix (pull #606)
ENH: Added band_as_variable option to open_rasterio (pull #600)
ENH: Added band_as_variable option to open_rasterio (issue #296)
BUG: Pass warp_extras dictionary to raster.vrt.WarpedVRT (issue #598)
BUG: Handle CF CRS export errors in rio.write_crs (discussion #591)
BUG: Fix mask_and_scale data load after .sel (issue #580)
BUG: Handle _Unsigned and load in all attributes (pull #575)
ENH: Allow passing in bounds of different CRS in rio.clip_box (pull #563)
BUG: Fix reading file handle with dask (issue #550)
BUG: Fix reading cint16 files with dask (issue #542)
BUG: Ensure rio.bounds ordered correctly (issue #545)
BUG: Allow reading from io.BytesIO (issue #549)
BUG: Fix WarpedVRT param cache in rioxarray.open_rasterio()
(issue #515)
BUG: Always generate coordinates in rio.reproject when GCPS|RPCS present (issue #517)
TYPE: Add more type hints (issue #373)
ENH: Add additional GDAL information to rioxarray.show_versions()
(pull #513)
BUG: Remove xarray crs attribute in rio.write_crs (issue #488)
BUG: Lazy load colormap through _manager.acquire() in merge (issue #479)
DEP: pin rasterio>=1.1.1 (pull #471)
BUG: Corrected bounds and transform args to float (pull #475)
DEP: Drop Python 3.7 support (issue #451)
ENH: Add GCPs reading and writing (issue #376)
BUG: Force coordinates to be exactly the same in rio.reproject_match (issue #298)
ENH: Allow additional kwargs to pass from reproject_match() -> reproject() (pull #436)
DEP: Make scipy an optional dependency (issue #413)
BUG: Return cached transform when axis data missing (pull #419)
BUG: Fix negative indexes in rio.isel_window (issue #421)
BUG: Handle transforms with rotation (pull #401)
BUG: rio.clip and rio.clip_box skip non-geospatial arrays in datasets when clipping (pull #392)
ENH: Add option for users to skip variables without spatial dimensions (pull #395)
BUG: Fix indexing error when mask_and_scale=True was combined with band dim chunking (issue #387, pull #388)
ENH: Add pad option to rio.isel_window (issue #381; pull #383)
BUG: Fix negative start in row or col window offsets in rio.isel_window (issue #381; pull #383)
ENH: Allow passing in kwargs to rio.reproject (issue #369; pull #370)
ENH: Allow nodata override and provide default nodata based on dtype in rio.reproject (pull #370)
ENH: Add support for passing in gcps to rio.reproject (issue #339; pull #370)
BUG: Remove duplicate acquire in open_rasterio (pull #364)
BUG: Fix exporting dataset to raster with non-standard dimensions (issue #372)
BUG: support GDAL CInt16, rasterio complex_int16 (pull #353)
TST: Fix merge tests for rasterio 1.2.5+ (issue #358)
BUG: Improve WarpedVRT support for gcps (pull #351)
BUG: pass kwargs with lock=False (issue #344)
BUG: Close file handle with lock=False (pull #346)
DEP: Python 3.7+ (issue #215)
DEP: xarray 0.17+ (needed for issue #282)
REF: Store grid_mapping in encoding instead of attrs (issue #282)
ENH: enable engine=”rasterio” via xarray backend API (issue #197 pull #281)
ENH: Generate 2D coordinates for non-rectilinear sources (issue #290)
ENH: Add encoded kwarg to rio.write_nodata (discussions #313)
ENH: Added decode_times and decode_timedelta kwargs to rioxarray.open_rasterio (issue #316)
BUG: Use float32 for smaller dtypes when masking (discussions #302)
BUG: Return correct transform in rio.transform with non-rectilinear transform (discussions #280)
BUG: Update to handle WindowError in rasterio 1.2.2 (issue #286)
BUG: Don’t generate x,y coords in rio methods if not previously there (pull #294)
BUG: Preserve original data type for writing to disk (issue #305)
BUG: handle lock=True in open_rasterio (issue #273)
BUG: Compatibility changes with xarray 0.17 (issue #254)
BUG: Raise informative error in interpolate_na if missing nodata (#250)
REF: Reduce pyproj.CRS internal usage for speed (issue #241)
ENH: Add rioxarray.set_options to disable exporting CRS CF grid mapping (issue #241)
BUG: Handle merging 2D DataArray (discussion #244)
ENH: Added rio.estimate_utm_crs (issue #181)
ENH: Add support for merging datasets with different CRS (issue #173)
ENH: Add support for using dask in rio.to_raster (issue #9, pull #219, pull #223)
ENH: Use the list version of transform_geom with rasterio 1.2+ (issue #180)
ENH: Support driver autodetection with rasterio 1.2+ (issue #180)
ENH: Allow multithreaded, lockless reads with rioxarray.open_rasterio (issue #214)
ENH: Add support to clip from disk (issue #115)
BUG: Allow rio.write_crs when spatial dimensions not found (pull #186)
BUG: Update to support rasterio 1.2+ merge (issue #180)
BUG: Check all CRS are the same in the dataset in crs() method
BUG: Ensure transform correct in rio.clip without coords (pull #165)
BUG: Ensure the nodata value matches the dtype (pull #166)
Raise deprecation exception in add_spatial_ref and add_xy_grid_meta (pull #168)
Deprecate add_spatial_ref and fix warning for add_xy_grid_meta (pull #158)
BUG: Fix assigning fill value in rio.pad_box (pull #140)
ENH: Add rio.write_transform to store cache in GDAL location (issue #129 & #139)
ENH: Use rasterio windows for rio.clip_box (issue #142)
BUG: Add support for negative indexes in rio.isel_window (pull #145)
BUG: Write transform based on window in rio.isel_window (pull #145)
ENH: Add rio.count, rio.slice_xy(), rio.bounds(), rio.resolution(), rio.transform_bounds() to Dataset level
ENH: Add rio.write_coordinate_system() (issue #147)
ENH: Search CF coordinate metadata to find coordinates (issue #147)
ENH: Default rio.clip to assume geometry has CRS of dataset (pull #150)
ENH: Add rio.grid_mapping and rio.write_grid_mapping & preserve original grid mapping (pull #151)
BUG: Remove unnecessary memory copies in reproject method (pull #136)
BUG: Fix order of axis in rio.isel_window (pull #133)
BUG: Allow clipping with disjoint geometries (issue #132)
BUG: Remove automatically setting tiled=True for windowed writing (pull #134)
ENH: Add rio.pad_box (pull #138)
rio.reproject: change input kwarg dst_affine_width_height -> shape & transform (#125)
ENH: Use pyproj.CRS to read/write CF parameters (issue #124)
ENH: Added optional shape argument to rio.reproject (pull #116)
Fix RasterioDeprecationWarning
(pull #117)
BUG: Make rio.shape order same as rasterio dataset shape (height, width) (pull #121)
Fix open_rasterio() for WarpedVRT with specified src_crs (pydata/xarray/pull/4104 & pull #120)
BUG: Use internal reprojection as engine for resampling window in merge (pull #123)
ENH: Added rioxarray.show_versions()
(issue #106)
BUG: Use recalc=True when using transform internally & ensure stable when coordinates unavailable. (issue #97)
ENH: Add variable names to error messages for clarity (pull #99)
BUG: Use assign_coords in _decode_datetime_cf (issue #101)
BUG: Fix ‘rio.set_spatial_dims’ so information saved with ‘rio’ accesors (issue #94)
ENH: Make ‘rio.isel_window’ available for datasets (pull #95)
ENH: Use pyproj.CRS internally to manage GDAL 2/3 transition (issue #92)
ENH: Add MissingCRS exceptions for ‘rio.clip’ and ‘rio.reproject’ (pull #93)
ENH: Added to_raster method for Datasets (issue #76)
BUG: ensure band_key is list when iterating over bands for mask and scale (pull #87)
Add support for writing scales & offsets to raster (pull #79)
Don’t write standard raster metadata to raster tags (issue #78)
Fixed windowed writing to require tiled output raster (pull #66)
Write data array attributes using rio.to_raster (issue #64)
Write variable name to descriptions if possible in rio.to_raster (issue #64)
Add mask_and_scale option to rioxarray.open_rasterio() (issue #67)
Hide NotGeoreferencedWarning warning when subdatasets are present using open_rasterio (issue #65)
Add support for loading in 1D variables in xarray.open_rasterio() (issue #43)
Load in netCDF metadata on the variable level (pull #73)
Add rioxarray.merge module (issue #46)
Renamed descriptions to long_name when opening with open_rasterio() (pull #63)
Make units & long_name scalar if they exist in rasterio attributes (pull #63)
Add support for netcdf/hdf groups with different shapes (pull #62)
Added variable and group kwargs to rioxarray.open_rasterio() to allow filtering of subdatasets (pull #57)
Added default_name kwarg to rioxarray.open_rasterio() for backup when the original does not exist (pull #59)
Added recalc_transform kwarg to rio.to_raster() (pull #56)
Added windowed kwarg to rio.to_raster() to write to raster using windowed writing (pull #54)
Added add rio.isel_window() to allow selection using a rasterio.windows.Window (pull #54)
Improve CRS searching for xarray.Dataset & use default grid mapping name (pull #51)
Use xarray.open_rasterio() for rioxarray.open_rasterio() with xarray<0.12.3 (pull #40)
Added open_kwargs to pass into rasterio.open() when using rioxarray.open_rasterio() (pull #48)
Added example opening Cloud Optimized GeoTiff (issue #45)
Add support for opening netcdf/hdf files with rioxarray.open_rasterio (issue #32)
Added support for custom CRS with wkt attribute for datacube CRS support (issue #35)
Added rio.set_nodata(), rio.write_nodata(), rio.set_attrs(), rio.update_attrs() (issue #37)
Add rioxarray.open_rasterio (issue #7)
Fix setting nodata in _add_attrs_proj (pull #30)
Add option to do an inverted clip (pull #29)
Add support for scalar coordinates in reproject (issue #15)
Updated writing encoding for FutureWarning (issue #18)
Use input raster profile for defaults to write output raster profile if opened with xarray.open_rasterio (issue #19)
Preserve None nodata if opened with xarray.open_rasterio (issue #20)
Added drop argument for clip() (issue #25)
Fix order of CRS for reprojecting geometries in clip() (pull #24)
Added set_spatial_dims() method for datasets when dimensions not found (issue #27)
Find nodata and nodatavals in ‘nodata’ property (pull #12)
Added ‘encoded_nodata’ property to DataArray (pull #12)
Write the raster with encoded_nodata instead of NaN for nodata (pull #12)
Added methods to set and write CRS (issue #5)
Added ability to export data array to raster (pull #8)
Use pip to install package from PyPI:
pip install rioxarray
Use conda with the conda-forge channel:
conda config --prepend channels conda-forge
conda config --set channel_priority strict
conda create -n rioxarray_env rioxarray
conda activate rioxarray_env
rioxarray conda-forge repository
Note
“… we recommend always installing your packages inside a new environment instead of the base environment from anaconda/miniconda. Using envs make it easier to debug problems with packages and ensure the stability of your root env.”
Warning
Avoid using pip install with a conda environment. If you encounter a python package that isn’t in conda-forge, consider submitting a recipe: https://github.com/conda-forge/staged-recipes/
-
The source for rioxarray can be installed from the GitHub repo.
-python -m pip install git+git://github.com/corteva/rioxarray.git#egg=rioxarray
-
To install for local development:
-git clone git@github.com:corteva/rioxarray.git
-cd rioxarray
-python -m pip install -e .[dev]
-
set_options
XRasterBase
XRasterBase.bounds()
XRasterBase.count
XRasterBase.crs
XRasterBase.estimate_utm_crs()
XRasterBase.get_gcps()
XRasterBase.grid_mapping
XRasterBase.height
XRasterBase.isel_window()
XRasterBase.resolution()
XRasterBase.set_attrs()
XRasterBase.set_crs()
XRasterBase.set_encoding()
XRasterBase.set_spatial_dims()
XRasterBase.shape
XRasterBase.slice_xy()
XRasterBase.transform()
XRasterBase.transform_bounds()
XRasterBase.update_attrs()
XRasterBase.update_encoding()
XRasterBase.width
XRasterBase.write_coordinate_system()
XRasterBase.write_crs()
XRasterBase.write_gcps()
XRasterBase.write_grid_mapping()
XRasterBase.write_transform()
XRasterBase.x_dim
XRasterBase.y_dim
RasterArray
RasterArray.clip()
RasterArray.clip_box()
RasterArray.encoded_nodata
RasterArray.interpolate_na()
RasterArray.nodata
RasterArray.pad_box()
RasterArray.pad_xy()
RasterArray.reproject()
RasterArray.reproject_match()
RasterArray.set_nodata()
RasterArray.to_raster()
RasterArray.write_nodata()
RasterDataset
rasterio xarray extension.
Report bugs/feature requests: https://github.com/corteva/rioxarray/issues
Ask questions: https://github.com/corteva/rioxarray/discussions
Ask developer questions: https://gitter.im/rioxarray/community
Ask questions from the GIS community: https://gis.stackexchange.com/questions/tagged/rioxarray
Source file: geo_xarray.py
datacube is licensed under the Apache License, Version 2.0. -The datacube license is included as LICENSE_datacube.
open_rasterio: rasterio_.py
set_options: options.py
xarray is licensed under the Apache License, Version 2.0. -The xarray license is included as LICENSE_xarray.
Source file: write.py
This package was originally templated with Cookiecutter.
-Open a file with rasterio (experimental).
-This should work with any file that rasterio can open (most often: -geoTIFF). The x and y coordinates are generated automatically from the -file’s geoinformation, shifted to the center of each pixel (see -“PixelIsArea” Raster Space -for more information).
-New in version 0.13: band_as_variable
-filename (str, rasterio.io.DatasetReader, or rasterio.vrt.WarpedVRT) – Path to the file to open. Or already open rasterio dataset.
parse_coordinates (bool, optional) – Whether to parse the x and y coordinates out of the file’s
-transform
attribute or not. The default is to automatically
-parse the coordinates only if they are rectilinear (1D).
-It can be useful to set parse_coordinates=False
-if your files are very large or if you don’t need the coordinates.
chunks (int, tuple or dict, optional) – Chunk sizes along each dimension, e.g., 5
, (5, 5)
or
-{'x': 5, 'y': 5}
. If chunks is provided, it is used to load the new
-DataArray into a dask array. Chunks can also be set to
-True
or "auto"
to choose sensible chunk sizes according to
-dask.config.get("array.chunk-size")
.
cache (bool, optional) – If True, cache data loaded from the underlying datastore in memory as NumPy arrays when accessed to avoid reading from the underlying datastore multiple times. Defaults to True unless you specify the chunks argument to use dask, in which case it defaults to False.
lock (bool or dask.utils.SerializableLock, optional) –
If chunks is provided, this argument is used to ensure that only one -thread per process is reading from a rasterio file object at a time.
-By default and when a lock instance is provided,
-a xarray.backends.CachingFileManager
is used to cache File objects.
-Since rasterio also caches some data, this will make repeated reads from the
-same object fast.
When lock=False
, no lock is used, allowing for completely parallel reads
-from multiple threads or processes. However, a new file handle is opened on
-each request.
masked (bool, optional) – If True, read the mask and set values to NaN. Defaults to False.
mask_and_scale (bool, default=False) – Lazily scale (using the scales and offsets from rasterio) and mask. -If the _Unsigned attribute is present treat integer arrays as unsigned.
variable (str or list or tuple, optional) – Variable name or names to use to filter loading.
group (str or list or tuple, optional) – Group name or names to use to filter loading.
default_name (str, optional) – The name of the data array if none exists. Default is None.
decode_times (bool, default=True) – If True, decode times encoded in the standard NetCDF datetime format -into datetime objects. Otherwise, leave them encoded as numbers.
decode_timedelta (bool, optional) – If True, decode variables and coordinates with time units in -{“days”, “hours”, “minutes”, “seconds”, “milliseconds”, “microseconds”} -into timedelta objects. If False, leave them encoded as numbers. -If None (default), assume the same value of decode_time.
band_as_variable (bool, default=False) – If True, will load bands in a raster to separate variables.
**open_kwargs (kwargs, optional) – Optional keyword arguments to pass into rasterio.open()
.
The newly created dataset(s).
-xarray.Dataset
| xarray.DataArray
| list[xarray.Dataset
]
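A hedged usage sketch combining several of the keywords above ("input.tif" is a placeholder path):

import rioxarray

xds = rioxarray.open_rasterio(
    "input.tif",                  # placeholder path
    masked=True,                  # read the mask and set nodata values to NaN
    chunks={"x": 512, "y": 512},  # load lazily into a dask array
)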
Merge data arrays geospatially.
- -New in version 0.2: crs
-dataarrays (list[xarray.DataArray]) – List of xarray.DataArray’s with all geo attributes. -The first one is assumed to have the same -CRS, dtype, and dimensions as the others in the array.
bounds (tuple, optional) – Bounds of the output image (left, bottom, right, top). -If not set, bounds are determined from bounds of input DataArrays.
res (tuple, optional) – Output resolution in units of coordinate reference system. -If not set, the resolution of the first DataArray is used. -If a single value is passed, output pixels will be square.
nodata (float, optional) – nodata value to use in output file. -If not set, uses the nodata value in the first input DataArray.
precision (float, optional) – Number of decimal points of precision when computing inverse transform.
method (str or callable, optional) – See rasterio.merge.merge()
for details.
crs (rasterio.crs.CRS, optional) – Output CRS. If not set, the CRS of the first DataArray is used.
parse_coordinates (bool, optional) – If False, it will disable loading spatial coordinates.
The geospatially merged data.
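A minimal usage sketch, assuming two overlapping tiles on disk (placeholder paths) that share a CRS and dtype:

import rioxarray
from rioxarray.merge import merge_arrays

tile_1 = rioxarray.open_rasterio("tile_1.tif", masked=True)
tile_2 = rioxarray.open_rasterio("tile_2.tif", masked=True)
merged = merge_arrays([tile_1, tile_2])  # bounds and res default to the inputs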
-Merge datasets geospatially.
- -New in version 0.2: crs
-datasets (list[xarray.Dataset]) – List of xarray.Dataset’s with all geo attributes. -The first one is assumed to have the same -CRS, dtype, dimensions, and data_vars as the others in the array.
bounds (tuple, optional) – Bounds of the output image (left, bottom, right, top). -If not set, bounds are determined from bounds of input Dataset.
res (tuple, optional) – Output resolution in units of coordinate reference system. -If not set, the resolution of the first Dataset is used. -If a single value is passed, output pixels will be square.
nodata (float, optional) – nodata value to use in output file. -If not set, uses the nodata value in the first input Dataset.
precision (float, optional) – Number of decimal points of precision when computing inverse transform.
method (str or callable, optional) – See rasterio docs.
crs (rasterio.crs.CRS, optional) – Output CRS. If not set, the CRS of the first DataArray is used.
The geospatially merged data.
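The Dataset variant works the same way; a sketch assuming the same two hypothetical tiles opened with band_as_variable=True:
>>> import rioxarray
>>> from rioxarray.merge import merge_datasets
>>> datasets = [rioxarray.open_rasterio(path, band_as_variable=True) for path in ("tile_1.tif", "tile_2.tif")]
>>> merged_ds = merge_datasets(datasets)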
Set the global rioxarray option.
New in version 0.3.0.
New in version 0.7.0: skip_missing_spatial_dims
export_grid_mapping (bool, default=True) – If True, this option will export the full Climate and Forecast (CF) grid mapping attributes for the CRS. This is useful if you are exporting your file to netCDF using xarray.Dataset.to_netcdf(). When disabled, only the crs_wkt and spatial_ref attributes will be written and the program will be faster due to not needing to use pyproj.CRS.to_cf().
skip_missing_spatial_dims (bool, default=False) – If True, it will not perform spatial operations on variables within a xarray.Dataset if the spatial dimensions are not found.
Usage as a context manager:
with rioxarray.set_options(export_grid_mapping=False):
    rds = rioxarray.open_rasterio(...)
Usage for global settings:
rioxarray.set_options(export_grid_mapping=False)
rioxarray extends xarray with the rio accessor. The rio accessor is activated by importing rioxarray like so:
import rioxarray
Bases: object
This is the base class for the GIS extensions for xarray
recalc (bool, optional) – Will force the bounds to be recalculated instead of using the transform attribute.
left, bottom, right, top – Outermost coordinates of the xarray.DataArray | xarray.Dataset.
float
Returns the band count (z dimension size)
int
rasterio.crs.CRS: Retrieve projection from xarray.Dataset | xarray.DataArray
Returns the estimated UTM CRS based on the bounds of the dataset.
New in version 0.2.
Note
Requires pyproj 3+
datum_name (str, optional) – The name of the datum to use in the query. Default is WGS 84.
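A common pattern is to estimate the UTM CRS and reproject to it; a short sketch assuming xds is a DataArray in geographic (longitude/latitude) coordinates:
>>> utm_crs = xds.rio.estimate_utm_crs()
>>> xds_utm = xds.rio.reproject(utm_crs)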
Get the GroundControlPoints from the dataset.
https://rasterio.readthedocs.io/en/latest/topics/georeferencing.html#ground-control-points
The Ground Control Points from the dataset or None if not applicable
list of rasterio.control.GroundControlPoint or None
The CF grid_mapping attribute. ‘spatial_ref’ is the default.
str
Returns the height of the dataset (y dimension size)
int
Use a rasterio.windows.Window to select a subset of the data.
New in version 0.6.0: pad
Warning
Float indices are converted to integers.
window (rasterio.windows.Window) – The window of the dataset to read.
pad (bool, default=False) – Set to True to expand returned DataArray to dimensions of the window
The data in the window.
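A sketch of reading a 256x256 pixel subset through a rasterio window, assuming xds is an opened DataArray:
>>> from rasterio.windows import Window
>>> subset = xds.rio.isel_window(Window(col_off=0, row_off=0, width=256, height=256))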
Determine the resolution of the grid. If the transformation has rotation, the sign of the resolution is lost.
recalc (bool, optional) – Will force the resolution to be recalculated instead of using the transform attribute.
x_resolution, y_resolution – The resolution of the xarray.DataArray | xarray.Dataset
float
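These accessor properties and methods can be used together to inspect a raster's georeferencing; a sketch assuming xds is an opened raster:
>>> xds.rio.crs
>>> xds.rio.bounds()
>>> xds.rio.resolution()
>>> xds.rio.shape, xds.rio.width, xds.rio.height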
Set the attributes of the dataset/dataarray and reset rioxarray properties to re-search for them.
-new_attrs (dict) – A dictionary of new attributes.
inplace (bool, optional) – If True, it will write to the existing dataset. Default is False.
Modified dataset with new attributes.
-Set the CRS value for the Dataset/DataArray without modifying -the dataset/data array.
-input_crs (object) – Anything accepted by rasterio.crs.CRS.from_user_input.
inplace (bool, optional) – If True, it will write to the existing dataset. Default is False.
Dataset with crs attribute.
-Set the encoding of the dataset/dataarray and reset -rioxarray properties to re-search for them.
-New in version 0.4.
-new_encoding (dict) – A dictionary for encoding.
inplace (bool, optional) – If True, it will write to the existing dataset. Default is False.
Modified dataset with new attributes.
-This sets the spatial dimensions of the dataset.
-x_dim (str) – The name of the x dimension.
y_dim (str) – The name of the y dimension.
inplace (bool, optional) – If True, it will modify the dataframe in place. -Otherwise it will return a modified copy.
Dataset with spatial dimensions set.
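If the spatial dimensions are not named 'x'/'y', they can be registered explicitly; a sketch assuming the dimensions are named 'lon' and 'lat':
>>> xds = xds.rio.set_spatial_dims(x_dim="lon", y_dim="lat")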
Returns the shape (height, width)
tuple(int, int)
Slice the array by x,y bounds.
-minx (float) – Minimum bound for x coordinate.
miny (float) – Minimum bound for y coordinate.
maxx (float) – Maximum bound for x coordinate.
maxy (float) – Maximum bound for y coordinate.
The data in the slice.
recalc (bool, optional) – If True, it will re-calculate the transform instead of using the cached transform.
The affine of the xarray.Dataset | xarray.DataArray
affine.Affine
Transform bounds from src_crs to dst_crs.
Optionally densifying the edges (to account for nonlinear transformations along these edges) and extracting the outermost bounds.
Note: this does not account for the antimeridian.
dst_crs (str, rasterio.crs.CRS, or dict) – Target coordinate reference system.
densify_pts (uint, optional) – Number of points to add to each edge to account for nonlinear -edges produced by the transform process. Large numbers will produce -worse performance. Default: 21 (gdal default).
recalc (bool, optional) – Will force the bounds to be recalculated instead of using the transform -attribute.
left, bottom, right, top – Outermost coordinates in target coordinate reference system.
-float
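A sketch of retrieving the extent in geographic coordinates, assuming xds has a projected CRS set:
>>> lon_min, lat_min, lon_max, lat_max = xds.rio.transform_bounds("EPSG:4326")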
-Update the attributes of the dataset/dataarray and reset -rioxarray properties to re-search for them.
-new_attrs (dict) – A dictionary of new attributes to update with.
inplace (bool, optional) – If True, it will write to the existing dataset. Default is False.
Modified dataset with updated attributes.
-Update the encoding of the dataset/dataarray and reset -rioxarray properties to re-search for them.
-New in version 0.4.
-new_encoding (dict) – A dictionary with encoding values to update with.
inplace (bool, optional) – If True, it will write to the existing dataset. Default is False.
Modified dataset with updated attributes.
-Returns the width of the dataset (x dimension size)
-int
-Write the coordinate system CF metadata.
-New in version 0.0.30.
-inplace (bool, optional) – If True, it will write to the existing dataset. Default is False.
-The dataset with the CF coordinate system attributes added.
-Write the CRS to the dataset in a CF compliant manner.
-Warning
-The grid_mapping attribute is written to the encoding.
-input_crs (Any) – Anything accepted by rasterio.crs.CRS.from_user_input.
grid_mapping_name (str, optional) – Name of the grid_mapping coordinate to store the CRS information in. -Default is the grid_mapping name of the dataset.
inplace (bool, optional) – If True, it will write to the existing dataset. Default is False.
Modified dataset with CF compliant CRS information.
Examples
Write the CRS of the current xarray object:
>>> raster.rio.write_crs("epsg:4326", inplace=True)
Write the CRS on a copy:
>>> raster = raster.rio.write_crs("epsg:4326")
Write the GroundControlPoints to the dataset.
-https://rasterio.readthedocs.io/en/latest/topics/georeferencing.html#ground-control-points
gcp (list of rasterio.control.GroundControlPoint) – The Ground Control Points to integrate to the dataset.
gcp_crs (str, rasterio.crs.CRS, or dict) – Coordinate reference system for the GCPs.
grid_mapping_name (str, optional) – Name of the grid_mapping coordinate to store the GCPs information in. -Default is the grid_mapping name of the dataset.
inplace (bool, optional) – If True, it will write to the existing dataset. Default is False.
Modified dataset with Ground Control Points written.
-Write the CF grid_mapping attribute to the encoding.
-grid_mapping_name (str, optional) – Name of the grid_mapping coordinate.
inplace (bool, optional) – If True, it will write to the existing dataset. Default is False.
Modified dataset with CF compliant CRS information.
-New in version 0.0.30.
-Write the GeoTransform to the dataset where GDAL can read it in.
-https://gdal.org/drivers/raster/netcdf.html#georeference
-transform (affine.Affine, optional) – The transform of the dataset. If not provided, it will be calculated.
grid_mapping_name (str, optional) – Name of the grid_mapping coordinate to store the transform information in. -Default is the grid_mapping name of the dataset.
inplace (bool, optional) – If True, it will write to the existing dataset. Default is False.
Modified dataset with Geo Transform written.
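A sketch of making the georeferencing explicit before exporting to netCDF, assuming xds already has x/y coordinates and a known CRS (the EPSG code below is only an example):
>>> xds = xds.rio.write_crs("EPSG:32622")
>>> xds = xds.rio.write_coordinate_system()
>>> xds = xds.rio.write_transform()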
-The dimension for the X-axis.
-Hashable
-The dimension for the Y-axis.
-Hashable
-Bases: XRasterBase
This is the GIS extension for xarray.DataArray
Crops a xarray.DataArray by geojson-like geometry dicts.
Powered by rasterio.features.geometry_mask.
Examples
>>> geometry = ''' {"type": "Polygon",
...     "coordinates": [
...     [[-94.07955380199459, 41.69085871273774],
...     [-94.06082436942204, 41.69103313774798],
...     [-94.06063203899649, 41.67932439500822],
...     [-94.07935807746362, 41.679150041277325],
...     [-94.07955380199459, 41.69085871273774]]]}'''
>>> cropping_geometries = [geojson.loads(geometry)]
>>> xds = xarray.open_rasterio('cool_raster.tif')
>>> cropped = xds.rio.clip(geometries=cropping_geometries, crs=4326)
New in version 0.2: from_disk
geometries (Iterable) – A list of geojson geometry dicts, or objects with __geo_interface__ if you have rasterio 1.2+.
crs (rasterio.crs.CRS, optional) – The CRS of the input geometries. Default is to assume it is the same as the dataset.
all_touched (bool, optional) – If True, all pixels touched by geometries will be burned in. If -false, only pixels whose center is within the polygon or that -are selected by Bresenham’s line algorithm will be burned in.
drop (bool, optional) – If True, drop the data outside of the extent of the mask geometries. Otherwise, it will return the same raster with the data masked. Default is True.
invert (boolean, optional) – If False, pixels that do not overlap shapes will be set as nodata. -Otherwise, pixels that overlap the shapes will be set as nodata. -False by default.
from_disk (boolean, optional) – If True, it will clip from disk using rasterio.mask.mask if possible. -This is beneficial when the size of the data is larger than memory. -Default is False.
The clipped object.
Clip the xarray.DataArray by a bounding box.
New in version 0.12: crs
-minx (float) – Minimum bound for x coordinate.
miny (float) – Minimum bound for y coordinate.
maxx (float) – Maximum bound for x coordinate.
maxy (float) – Maximum bound for y coordinate.
auto_expand (Union[bool, int]) – If True, it will expand clip search if only 1D raster found with clip.
auto_expand_limit (int) – maximum number of times the clip will be retried before raising -an exception.
crs (rasterio.crs.CRS, optional) – The CRS of the bounding box. Default is to assume it is the same as the dataset.
The clipped object.
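A sketch of clipping to a bounding box supplied in a different CRS, assuming xds is projected and the box is given in longitude/latitude:
>>> clipped = xds.rio.clip_box(minx=-94.08, miny=41.68, maxx=-94.06, maxy=41.69, crs="EPSG:4326")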
Return the encoded nodata value for the dataset if encoded.
This method uses scipy.interpolate.griddata to interpolate missing data.
Warning
scipy is an optional dependency.
method ({'linear', 'nearest', 'cubic'}, optional) – The method to use for interpolation in scipy.interpolate.griddata.
An interpolated xarray.DataArray object.
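A sketch of filling nodata gaps, assuming scipy is installed and xds was opened with masked=True:
>>> filled = xds.rio.interpolate_na(method="nearest")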
Get the nodata value for the dataset.
Pad the xarray.DataArray to a bounding box.
New in version 0.0.29.
-minx (float) – Minimum bound for x coordinate.
miny (float) – Minimum bound for y coordinate.
maxx (float) – Maximum bound for x coordinate.
maxy (float) – Maximum bound for y coordinate.
constant_values (scalar, tuple or mapping of hashable to tuple) – The value used for padding. If None, nodata will be used if it is -set, and numpy.nan otherwise.
The padded object.
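A sketch of padding a raster out to a larger extent with pad_box, assuming xds is in a projected CRS and the bounds below are illustrative values in that CRS:
>>> padded = xds.rio.pad_box(minx=466000.0, miny=8084000.0, maxx=467000.0, maxy=8085000.0, constant_values=0)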
-Pad the array to x,y bounds.
-New in version 0.0.29.
-minx (float) – Minimum bound for x coordinate.
miny (float) – Minimum bound for y coordinate.
maxx (float) – Maximum bound for x coordinate.
maxy (float) – Maximum bound for y coordinate.
constant_values (scalar, tuple or mapping of hashable to tuple) – The value used for padding. If None, nodata will be used if it is -set, and numpy.nan otherwise.
The padded object.
Reproject xarray.DataArray objects.
Powered by rasterio.warp.reproject()
Note
Only 2D/3D arrays with dimensions ‘x’/’y’ are currently supported. Requires either a grid mapping variable with ‘spatial_ref’ or a ‘crs’ attribute to be set containing a valid CRS. If using a WKT (e.g. from spatialreference.org), make sure it is an OGC WKT.
-Note
-To re-project with dask, see -odc-geo & -pyresample.
-New in version 0.0.27: shape
-New in version 0.0.28: transform
-New in version 0.5.0: nodata, kwargs
-dst_crs (str) – OGC WKT string or Proj.4 string.
resolution (float or tuple(float, float), optional) – Size of a destination pixel in destination projection units -(e.g. degrees or metres).
shape (tuple(int, int), optional) – Shape of the destination in pixels (dst_height, dst_width). Cannot be used -together with resolution.
transform (Affine, optional) – The destination transform.
resampling (rasterio.enums.Resampling, optional) – See rasterio.warp.reproject() for more details.
nodata (float, optional) – The nodata value used to initialize the destination; -it will remain in all areas not covered by the reprojected source. -Defaults to the nodata value of the source image if none provided -and exists or attempts to find an appropriate value by dtype.
**kwargs (dict) – Additional keyword arguments to pass into rasterio.warp.reproject().
To override:
- src_transform: rio.write_transform
- src_crs: rio.write_crs
- src_nodata: rio.write_nodata
The reprojected DataArray.
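A sketch of reprojecting to a new CRS with an explicit resampling method, assuming xds has a valid CRS written:
>>> from rasterio.enums import Resampling
>>> xds_wgs84 = xds.rio.reproject("EPSG:4326", resampling=Resampling.bilinear)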
-Reproject a DataArray object to match the resolution, projection, -and region of another DataArray.
Powered by rasterio.warp.reproject()
Note
Only 2D/3D arrays with dimensions ‘x’/’y’ are currently supported. Requires either a grid mapping variable with ‘spatial_ref’ or a ‘crs’ attribute to be set containing a valid CRS. If using a WKT (e.g. from spatialreference.org), make sure it is an OGC WKT.
New in version 0.9: reproject_kwargs
match_data_array (xarray.DataArray | xarray.Dataset) – DataArray of the target resolution and projection.
resampling (rasterio.enums.Resampling, optional) – See rasterio.warp.reproject() for more details.
**reproject_kwargs – Other options to pass to rioxarray.raster_array.RasterArray.reproject()
Contains the data from the src_data_array, reprojected to match match_data_array.
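A sketch of snapping one raster onto another's grid, assuming xds and xds_match are DataArrays that both have CRS information:
>>> matched = xds.rio.reproject_match(xds_match)
>>> # matched now shares the resolution, transform, and CRS of xds_match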
-Set the nodata value for the DataArray without modifying -the data array.
-input_nodata (Optional[float]) – Valid nodata for dtype.
inplace (bool, optional) – If True, it will write to the existing dataset. Default is True.
Dataset with nodata attribute set.
Export the DataArray to a raster file.
New in version 0.2: lock
-raster_path (Union[str, os.PathLike]) – The path to output the raster to.
driver (str, optional) – The name of the GDAL/rasterio driver to use to export the raster. -Default is “GTiff” if rasterio < 1.2 otherwise it will autodetect.
dtype (str, optional) – The data type to write the raster to. Default is the datasets dtype.
tags (dict, optional) – A dictionary of tags to write to the raster.
windowed (bool, optional) – If True, it will write using the windows of the output raster. -This is useful for loading data in chunks when writing. Does not -do anything when writing with dask. -Default is False.
recalc_transform (bool, optional) – If False, it will write the raster with the cached transform from -the dataarray rather than recalculating it. -Default is True.
lock (boolean or Lock, optional) – Lock to use to write data using dask. -If not supplied, it will use a single process for writing.
compute (bool, optional) – If True and data is a dask array, then compute and save the data immediately. If False, return a dask Delayed object. Call “.compute()” on the Delayed object to compute the result later. Call dask.compute(delayed1, delayed2) to save multiple delayed files at once. Default is True.
**profile_kwargs – Additional keyword arguments to pass into writing the raster. The nodata, transform, crs, count, width, and height attributes are ignored.
If the data array is a dask array and compute is True. Otherwise None is returned.
dask.Delayed
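A sketch of exporting to GeoTIFF with creation options passed through profile_kwargs, assuming "output.tif" is the desired path (hypothetical):
>>> xds.rio.to_raster("output.tif", compress="LZW", tiled=True)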
Write the nodata to the DataArray in a CF compliant manner.
-input_nodata (Optional[float]) – Nodata value for the DataArray. -If input_nodata is None, it will remove the _FillValue attribute.
encoded (bool, optional) – If True, it will write the nodata value in the encoding and remove -the fill value from the attributes. This is useful for masking -with nodata. Default is False.
inplace (bool, optional) – If True, it will write to the existing DataArray. Default is False.
Modified DataArray with CF compliant nodata information.
-Examples
-To write the nodata value if it is missing:
->>> raster.rio.write_nodata(-9999, inplace=True)
-
To write the nodata value on a copy:
->>> raster = raster.rio.write_nodata(-9999)
-
To mask with nodata:
->>> nodata = raster.rio.nodata
->>> raster = raster.where(raster != nodata)
->>> raster.rio.write_nodata(nodata, encoded=True, inplace=True)
-
Bases: XRasterBase
This is the GIS extension for xarray.Dataset
Crops a xarray.Dataset by geojson-like geometry dicts in dimensions ‘x’/’y’.
Warning
Clips variables that have dimensions ‘x’/’y’. Others are appended as is.
Powered by rasterio.features.geometry_mask.
Examples
>>> geometry = ''' {"type": "Polygon",
...     "coordinates": [
...     [[-94.07955380199459, 41.69085871273774],
...     [-94.06082436942204, 41.69103313774798],
...     [-94.06063203899649, 41.67932439500822],
...     [-94.07935807746362, 41.679150041277325],
...     [-94.07955380199459, 41.69085871273774]]]}'''
>>> cropping_geometries = [geojson.loads(geometry)]
>>> xds = xarray.open_rasterio('cool_raster.tif')
>>> cropped = xds.rio.clip(geometries=cropping_geometries, crs=4326)
New in version 0.2: from_disk
-geometries (list) – A list of geojson geometry dicts.
crs (rasterio.crs.CRS, optional) – The CRS of the input geometries. Default is to assume it is the same as the dataset.
all_touched (boolean, optional) – If True, all pixels touched by geometries will be burned in. If -false, only pixels whose center is within the polygon or that -are selected by Bresenham’s line algorithm will be burned in.
drop (bool, optional) – If True, drop the data outside of the extent of the mask geometries -Otherwise, it will return the same raster with the data masked. -Default is True.
invert (boolean, optional) – If False, pixels that do not overlap shapes will be set as nodata. -Otherwise, pixels that overlap the shapes will be set as nodata. -False by default.
from_disk (boolean, optional) – If True, it will clip from disk using rasterio.mask.mask if possible. -This is beneficial when the size of the data is larger than memory. -Default is False.
The clipped object.
Clip the xarray.Dataset by a bounding box in dimensions ‘x’/’y’.
Warning
-Clips variables that have dimensions ‘x’/’y’. Others are appended as is.
-New in version 0.12: crs
-minx (float) – Minimum bound for x coordinate.
miny (float) – Minimum bound for y coordinate.
maxx (float) – Maximum bound for x coordinate.
maxy (float) – Maximum bound for y coordinate.
auto_expand (bool) – If True, it will expand clip search if only 1D raster found with clip.
auto_expand_limit (int) – maximum number of times the clip will be retried before raising -an exception.
crs (rasterio.crs.CRS, optional) – The CRS of the bounding box. Default is to assume it is the same as the dataset.
The clipped object.
Dataset
rasterio.crs.CRS: Retrieve projection from xarray.Dataset
This method uses scipy.interpolate.griddata to interpolate missing data.
Warning
scipy is an optional dependency.
Warning
Interpolates variables that have dimensions ‘x’/’y’. Others are appended as is.
method ({'linear', 'nearest', 'cubic'}, optional) – The method to use for interpolation in scipy.interpolate.griddata.
The interpolated object.
Pad the xarray.Dataset to a bounding box.
Warning
-Only works if all variables in the dataset have the -same coordinates.
-Warning
-Pads variables that have dimensions ‘x’/’y’. Others are appended as is.
-minx (float) – Minimum bound for x coordinate.
miny (float) – Minimum bound for y coordinate.
maxx (float) – Maximum bound for x coordinate.
maxy (float) – Maximum bound for y coordinate.
constant_values (scalar, tuple or mapping of hashable to tuple) – The value used for padding. If None, nodata will be used if it is -set, and numpy.nan otherwise.
The padded object.
Reproject xarray.Dataset objects.
Note
Only 2D/3D arrays with dimensions ‘x’/’y’ are currently supported. Others are appended as is. Requires either a grid mapping variable with ‘spatial_ref’ or a ‘crs’ attribute to be set containing a valid CRS. If using a WKT (e.g. from spatialreference.org), make sure it is an OGC WKT.
-Note
-To re-project with dask, see -odc-geo & -pyresample.
-New in version 0.0.27: shape
-New in version 0.0.28: transform
-New in version 0.5.0: nodata, kwargs
-dst_crs (str) – OGC WKT string or Proj.4 string.
resolution (float or tuple(float, float), optional) – Size of a destination pixel in destination projection units -(e.g. degrees or metres).
shape (tuple(int, int), optional) – Shape of the destination in pixels (dst_height, dst_width). Cannot be used -together with resolution.
transform (Affine, optional) – The destination transform.
resampling (rasterio.enums.Resampling, optional) – See rasterio.warp.reproject() for more details.
nodata (float, optional) – The nodata value used to initialize the destination; -it will remain in all areas not covered by the reprojected source. -Defaults to the nodata value of the source image if none provided -and exists or attempts to find an appropriate value by dtype.
**kwargs (dict) – Additional keyword arguments to pass into rasterio.warp.reproject().
To override:
- src_transform: rio.write_transform
- src_crs: rio.write_crs
- src_nodata: rio.write_nodata
The reprojected Dataset.
-Reproject a Dataset object to match the resolution, projection, -and region of another DataArray.
-Note
Only 2D/3D arrays with dimensions ‘x’/’y’ are currently supported. Others are appended as is. Requires either a grid mapping variable with ‘spatial_ref’ or a ‘crs’ attribute to be set containing a valid CRS. If using a WKT (e.g. from spatialreference.org), make sure it is an OGC WKT.
-New in version 0.9: reproject_kwargs
match_data_array (xarray.DataArray | xarray.Dataset) – Dataset with the target resolution and projection.
resampling (rasterio.enums.Resampling, optional) – See rasterio.warp.reproject() for more details.
**reproject_kwargs – Other options to pass to rioxarray.raster_dataset.RasterDataset.reproject()
Contains the data from the src_data_array, reprojected to match match_data_array.
Export the Dataset to a raster file. Only works with 2D data.
New in version 0.2: lock
-raster_path (str) – The path to output the raster to.
driver (str, optional) – The name of the GDAL/rasterio driver to use to export the raster. -Default is “GTiff” if rasterio < 1.2 otherwise it will autodetect.
dtype (str, optional) – The data type to write the raster to. Default is the datasets dtype.
tags (dict, optional) – A dictionary of tags to write to the raster.
windowed (bool, optional) – If True, it will write using the windows of the output raster. -This is useful for loading data in chunks when writing. Does not -do anything when writing with dask. -Default is False.
recalc_transform (bool, optional) – If False, it will write the raster with the cached transform from -the dataset rather than recalculating it. -Default is True.
lock (boolean or Lock, optional) – Lock to use to write data using dask. -If not supplied, it will use a single process for writing.
compute (bool, optional) – If True and data is a dask array, then compute and save the data immediately. If False, return a dask Delayed object. Call “.compute()” on the Delayed object to compute the result later. Call dask.compute(delayed1, delayed2) to save multiple delayed files at once. Default is True.
**profile_kwargs – Additional keyword arguments to pass into writing the raster. The nodata, transform, crs, count, width, and height attributes are ignored.
If the data array is a dask array and compute is True. Otherwise None is returned.
dask.Delayed
Returns non-coordinate variables
list
-This contains exceptions for rioxarray.
-Bases: RioXarrayError
This is raised when there are more dimensions than are supported by the method
-Bases: RioXarrayError
This is raised when the dimension does not have the supporting coordinate.
-Bases: DimensionError
This is raised when the dimensions are not ordered correctly.
-Bases: RioXarrayError
Missing the CRS in the dataset.
-Bases: DimensionError
This is raised when the dimension cannot be found
-Bases: RioXarrayError
This is for when there are no data in the bounds for clipping a raster.
-Bases: DimensionError
This is an error when you have a 1 dimensional raster.
-Bases: RuntimeError
This is the base exception for errors in the rioxarray extension.
-Bases: RioXarrayError
This is for when you have a dataset with a single variable.
-Bases: DimensionError
This is raised when there are more dimensions than are supported by the method
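These exceptions can be caught explicitly; a sketch of handling a clip that falls entirely outside the raster, assuming geometries and xds are defined as in the clip examples above:
>>> from rioxarray.exceptions import NoDataInBounds
>>> try:
...     clipped = xds.rio.clip(geometries=geometries, crs=4326)
... except NoDataInBounds:
...     clipped = None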
-<xarray.DataArray (y: 200, x: 200)>\n", - "array([[ nan, nan, nan, ..., 656., 656., 554.],\n", - " [ nan, nan, nan, ..., 694., 694., 642.],\n", - " [ nan, nan, nan, ..., 456., 575., 642.],\n", - " ...,\n", - " [993., 817., 817., ..., 471., 479., 498.],\n", - " [893., 893., 816., ..., 479., 479., 469.],\n", - " [816., 816., 832., ..., 515., 469., 485.]], dtype=float32)\n", - "Coordinates:\n", - " * y (y) float64 5.05e+06 5.05e+06 5.05e+06 ... 5.004e+06 5.004e+06\n", - " * x (x) float64 -7.274e+06 -7.274e+06 ... -7.228e+06 -7.228e+06\n", - "Attributes:\n", - " crs: +a=6371007.181 +b=6371007.181 +lon_0=0 +no_defs +proj=sinu +u...\n", - " res: [231.65635826 231.65635826]\n", - " is_tiled: 0\n", - " nodata: -28672.0\n", - " transform: [ 2.31656358e+02 0.00000000e+00 -7.27400965e+06 0.00000000e...
<xarray.Dataset>\n", - "Dimensions: (y: 10, x: 10, time: 2)\n", - "Coordinates:\n", - " * y (y) float64 8.085e+06 8.085e+06 ... 8.085e+06 8.085e+06\n", - " * x (x) float64 4.663e+05 4.663e+05 ... 4.663e+05 4.663e+05\n", - " * time (time) object 2016-12-19 10:27:29.687763 2016-12-29 12:52:42...\n", - " spatial_ref int64 0\n", - "Data variables:\n", - " blue (time, y, x) float64 6.611 5.581 0.3996 ... 3.491 5.056 3.368\n", - " green (time, y, x) float64 7.921 66.15 30.1 ... 21.76 27.29 18.41\n", - "Attributes:\n", - " coordinates: spatial_ref
array([[ nan, nan, nan, ..., 656., 656., 554.],\n", - " [ nan, nan, nan, ..., 694., 694., 642.],\n", - " [ nan, nan, nan, ..., 456., 575., 642.],\n", - " ...,\n", - " [993., 817., 817., ..., 471., 479., 498.],\n", - " [893., 893., 816., ..., 479., 479., 469.],\n", - " [816., 816., 832., ..., 515., 469., 485.]], dtype=float32)
array([5049992.781974, 5049761.125615, 5049529.469257, 5049297.812899,\n", - " 5049066.156541, 5048834.500182, 5048602.843824, 5048371.187466,\n", - " 5048139.531108, 5047907.874749, 5047676.218391, 5047444.562033,\n", - " 5047212.905674, 5046981.249316, 5046749.592958, 5046517.9366 ,\n", - " 5046286.280241, 5046054.623883, 5045822.967525, 5045591.311167,\n", - " 5045359.654808, 5045127.99845 , 5044896.342092, 5044664.685734,\n", - " 5044433.029375, 5044201.373017, 5043969.716659, 5043738.0603 ,\n", - " 5043506.403942, 5043274.747584, 5043043.091226, 5042811.434867,\n", - " 5042579.778509, 5042348.122151, 5042116.465793, 5041884.809434,\n", - " 5041653.153076, 5041421.496718, 5041189.84036 , 5040958.184001,\n", - " 5040726.527643, 5040494.871285, 5040263.214927, 5040031.558568,\n", - " 5039799.90221 , 5039568.245852, 5039336.589493, 5039104.933135,\n", - " 5038873.276777, 5038641.620419, 5038409.96406 , 5038178.307702,\n", - " 5037946.651344, 5037714.994986, 5037483.338627, 5037251.682269,\n", - " 5037020.025911, 5036788.369553, 5036556.713194, 5036325.056836,\n", - " 5036093.400478, 5035861.74412 , 5035630.087761, 5035398.431403,\n", - " 5035166.775045, 5034935.118686, 5034703.462328, 5034471.80597 ,\n", - " 5034240.149612, 5034008.493253, 5033776.836895, 5033545.180537,\n", - " 5033313.524179, 5033081.86782 , 5032850.211462, 5032618.555104,\n", - " 5032386.898746, 5032155.242387, 5031923.586029, 5031691.929671,\n", - " 5031460.273313, 5031228.616954, 5030996.960596, 5030765.304238,\n", - " 5030533.647879, 5030301.991521, 5030070.335163, 5029838.678805,\n", - " 5029607.022446, 5029375.366088, 5029143.70973 , 5028912.053372,\n", - " 5028680.397013, 5028448.740655, 5028217.084297, 5027985.427939,\n", - " 5027753.77158 , 5027522.115222, 5027290.458864, 5027058.802506,\n", - " 5026827.146147, 5026595.489789, 5026363.833431, 5026132.177072,\n", - " 5025900.520714, 5025668.864356, 5025437.207998, 5025205.551639,\n", - " 5024973.895281, 5024742.238923, 5024510.582565, 5024278.926206,\n", - " 5024047.269848, 5023815.61349 , 5023583.957132, 5023352.300773,\n", - " 5023120.644415, 5022888.988057, 5022657.331698, 5022425.67534 ,\n", - " 5022194.018982, 5021962.362624, 5021730.706265, 5021499.049907,\n", - " 5021267.393549, 5021035.737191, 5020804.080832, 5020572.424474,\n", - " 5020340.768116, 5020109.111758, 5019877.455399, 5019645.799041,\n", - " 5019414.142683, 5019182.486325, 5018950.829966, 5018719.173608,\n", - " 5018487.51725 , 5018255.860891, 5018024.204533, 5017792.548175,\n", - " 5017560.891817, 5017329.235458, 5017097.5791 , 5016865.922742,\n", - " 5016634.266384, 5016402.610025, 5016170.953667, 5015939.297309,\n", - " 5015707.640951, 5015475.984592, 5015244.328234, 5015012.671876,\n", - " 5014781.015518, 5014549.359159, 5014317.702801, 5014086.046443,\n", - " 5013854.390084, 5013622.733726, 5013391.077368, 5013159.42101 ,\n", - " 5012927.764651, 5012696.108293, 5012464.451935, 5012232.795577,\n", - " 5012001.139218, 5011769.48286 , 5011537.826502, 5011306.170144,\n", - " 5011074.513785, 5010842.857427, 5010611.201069, 5010379.544711,\n", - " 5010147.888352, 5009916.231994, 5009684.575636, 5009452.919277,\n", - " 5009221.262919, 5008989.606561, 5008757.950203, 5008526.293844,\n", - " 5008294.637486, 5008062.981128, 5007831.32477 , 5007599.668411,\n", - " 5007368.012053, 5007136.355695, 5006904.699337, 5006673.042978,\n", - " 5006441.38662 , 5006209.730262, 5005978.073904, 5005746.417545,\n", - " 5005514.761187, 5005283.104829, 5005051.44847 , 5004819.792112,\n", - " 5004588.135754, 
5004356.479396, 5004124.823037, 5003893.166679])
array([-7273893.821307, -7273662.164949, -7273430.508591, -7273198.852232,\n", - " -7272967.195874, -7272735.539516, -7272503.883158, -7272272.226799,\n", - " -7272040.570441, -7271808.914083, -7271577.257725, -7271345.601366,\n", - " -7271113.945008, -7270882.28865 , -7270650.632291, -7270418.975933,\n", - " -7270187.319575, -7269955.663217, -7269724.006858, -7269492.3505 ,\n", - " -7269260.694142, -7269029.037784, -7268797.381425, -7268565.725067,\n", - " -7268334.068709, -7268102.412351, -7267870.755992, -7267639.099634,\n", - " -7267407.443276, -7267175.786918, -7266944.130559, -7266712.474201,\n", - " -7266480.817843, -7266249.161484, -7266017.505126, -7265785.848768,\n", - " -7265554.19241 , -7265322.536051, -7265090.879693, -7264859.223335,\n", - " -7264627.566977, -7264395.910618, -7264164.25426 , -7263932.597902,\n", - " -7263700.941544, -7263469.285185, -7263237.628827, -7263005.972469,\n", - " -7262774.31611 , -7262542.659752, -7262311.003394, -7262079.347036,\n", - " -7261847.690677, -7261616.034319, -7261384.377961, -7261152.721603,\n", - " -7260921.065244, -7260689.408886, -7260457.752528, -7260226.09617 ,\n", - " -7259994.439811, -7259762.783453, -7259531.127095, -7259299.470737,\n", - " -7259067.814378, -7258836.15802 , -7258604.501662, -7258372.845303,\n", - " -7258141.188945, -7257909.532587, -7257677.876229, -7257446.21987 ,\n", - " -7257214.563512, -7256982.907154, -7256751.250796, -7256519.594437,\n", - " -7256287.938079, -7256056.281721, -7255824.625363, -7255592.969004,\n", - " -7255361.312646, -7255129.656288, -7254897.99993 , -7254666.343571,\n", - " -7254434.687213, -7254203.030855, -7253971.374496, -7253739.718138,\n", - " -7253508.06178 , -7253276.405422, -7253044.749063, -7252813.092705,\n", - " -7252581.436347, -7252349.779989, -7252118.12363 , -7251886.467272,\n", - " -7251654.810914, -7251423.154556, -7251191.498197, -7250959.841839,\n", - " -7250728.185481, -7250496.529122, -7250264.872764, -7250033.216406,\n", - " -7249801.560048, -7249569.903689, -7249338.247331, -7249106.590973,\n", - " -7248874.934615, -7248643.278256, -7248411.621898, -7248179.96554 ,\n", - " -7247948.309182, -7247716.652823, -7247484.996465, -7247253.340107,\n", - " -7247021.683749, -7246790.02739 , -7246558.371032, -7246326.714674,\n", - " -7246095.058315, -7245863.401957, -7245631.745599, -7245400.089241,\n", - " -7245168.432882, -7244936.776524, -7244705.120166, -7244473.463808,\n", - " -7244241.807449, -7244010.151091, -7243778.494733, -7243546.838375,\n", - " -7243315.182016, -7243083.525658, -7242851.8693 , -7242620.212942,\n", - " -7242388.556583, -7242156.900225, -7241925.243867, -7241693.587508,\n", - " -7241461.93115 , -7241230.274792, -7240998.618434, -7240766.962075,\n", - " -7240535.305717, -7240303.649359, -7240071.993001, -7239840.336642,\n", - " -7239608.680284, -7239377.023926, -7239145.367568, -7238913.711209,\n", - " -7238682.054851, -7238450.398493, -7238218.742135, -7237987.085776,\n", - " -7237755.429418, -7237523.77306 , -7237292.116701, -7237060.460343,\n", - " -7236828.803985, -7236597.147627, -7236365.491268, -7236133.83491 ,\n", - " -7235902.178552, -7235670.522194, -7235438.865835, -7235207.209477,\n", - " -7234975.553119, -7234743.896761, -7234512.240402, -7234280.584044,\n", - " -7234048.927686, -7233817.271327, -7233585.614969, -7233353.958611,\n", - " -7233122.302253, -7232890.645894, -7232658.989536, -7232427.333178,\n", - " -7232195.67682 , -7231964.020461, -7231732.364103, -7231500.707745,\n", - " -7231269.051387, -7231037.395028, -7230805.73867 , 
-7230574.082312,\n", - " -7230342.425954, -7230110.769595, -7229879.113237, -7229647.456879,\n", - " -7229415.80052 , -7229184.144162, -7228952.487804, -7228720.831446,\n", - " -7228489.175087, -7228257.518729, -7228025.862371, -7227794.206013])
<xarray.Dataset>\n", - "Dimensions: (time: 2, x: 10, y: 10)\n", - "Coordinates:\n", - " spatial_ref int64 0\n", - " * x (x) float64 4.663e+05 4.663e+05 ... 4.663e+05 4.663e+05\n", - " * time (time) datetime64[ns] 2016-12-19T10:27:29.687763 2016-12-29T...\n", - " * y (y) float64 8.085e+06 8.085e+06 ... 8.085e+06 8.085e+06\n", - "Data variables:\n", - " blue (time, y, x) float64 6.611 5.581 0.3996 ... 3.491 5.056 3.368\n", - " green (time, y, x) float64 7.921 66.15 30.1 ... 21.76 27.29 18.41
array(0)
array([466267.5, 466270.5, 466273.5, 466276.5, 466279.5, 466282.5, 466285.5,\n", - " 466288.5, 466291.5, 466294.5])
array(['2016-12-19T10:27:29.687763000', '2016-12-29T12:52:42.347451000'],\n", - " dtype='datetime64[ns]')
array([8084698.5, 8084695.5, 8084692.5, 8084689.5, 8084686.5, 8084683.5,\n", - " 8084680.5, 8084677.5, 8084674.5, 8084671.5])
array([[[6.611017, 5.580979, 0.399607, 2.052803, 5.479985, 4.760219,\n", - " 5.077927, 5.574792, 0.726683, 5.170288],\n", - " [4.535516, 0.088263, 4.222302, 0.289199, 3.478147, 3.227945,\n", - " 2.736443, 2.821799, 1.04221 , 1.099616],\n", - " [0.071364, 4.393267, 0.496907, 2.311926, 1.19123 , 5.984189,\n", - " 5.266977, 1.146988, 3.219185, 0.982011],\n", - " [5.818552, 5.176065, 4.891903, 4.557147, 4.706706, 4.670835,\n", - " 2.344188, 0.493237, 2.707488, 4.15662 ],\n", - " [0.78458 , 3.838047, 0.803847, 1.588861, 5.002692, 2.565792,\n", - " 5.195116, 2.642173, 1.47148 , 6.854931],\n", - " [4.407593, 6.734126, 3.702884, 4.682068, 0.498895, 2.931409,\n", - " 5.713952, 6.388658, 6.618634, 3.267176],\n", - " [6.175152, 2.331721, 6.829246, 1.650656, 5.530157, 0.167316,\n", - " 6.406627, 4.559968, 0.083601, 1.231478],\n", - " [2.438085, 1.593681, 5.902827, 2.124327, 1.870033, 4.519462,\n", - " 4.845505, 6.214475, 2.237778, 4.178488],\n", - " [5.855639, 5.824333, 6.006212, 5.43411 , 3.393885, 6.183958,\n", - " 1.284061, 0.028555, 6.726706, 1.516993],\n", - " [0.844103, 5.61329 , 6.978045, 1.183779, 2.539376, 1.268038,\n", - " 1.27574 , 5.101129, 2.020225, 2.081746]],\n", - "\n", - " [[6.242907, 6.394763, 1.302492, 2.895077, 0.537664, 5.012127,\n", - " 4.380069, 1.624007, 5.56236 , 4.303491],\n", - " [6.745937, 3.653031, 5.116965, 1.835256, 3.514732, 6.523679,\n", - " 0.17986 , 2.444248, 5.169099, 0.584623],\n", - " [2.119518, 0.479052, 4.274714, 6.019049, 6.346329, 2.858441,\n", - " 3.976887, 4.218218, 4.320734, 3.941107],\n", - " [2.974649, 6.67996 , 3.16875 , 3.507162, 2.909711, 1.359386,\n", - " 4.023015, 0.258809, 2.007625, 5.744746],\n", - " [2.572982, 1.250802, 4.652006, 6.310958, 3.266477, 3.216613,\n", - " 3.47315 , 1.713415, 5.9575 , 2.729016],\n", - " [2.743341, 4.203021, 1.306215, 2.840115, 5.351451, 2.774677,\n", - " 2.621753, 5.288077, 0.722618, 4.086008],\n", - " [3.168672, 4.031863, 5.775087, 3.487762, 6.034522, 4.721887,\n", - " 5.093018, 0.386024, 2.278799, 1.536351],\n", - " [4.548959, 4.797754, 6.371152, 3.69425 , 6.855343, 1.987757,\n", - " 4.749654, 1.750539, 1.804795, 4.966536],\n", - " [1.78139 , 1.544275, 1.750026, 3.373274, 2.604254, 5.894015,\n", - " 3.217723, 5.637478, 2.900347, 2.097813],\n", - " [1.844174, 6.994962, 2.504717, 5.292132, 0.184411, 4.870834,\n", - " 1.888442, 3.491315, 5.055704, 3.368395]]])
array([[[ 7.920639, 66.150832, 30.096116, 30.437197, 57.794734, 11.538647,\n", - " 14.426782, 35.593171, 53.784858, 0.449093],\n", - " [23.804111, 67.910347, 18.694533, 30.41474 , 68.117674, 44.906057,\n", - " 62.311842, 37.485047, 57.134336, 7.52572 ],\n", - " [36.653481, 39.596833, 61.07603 , 56.883093, 29.635613, 64.114699,\n", - " 42.341689, 54.724789, 31.872344, 11.282554],\n", - " [32.502176, 28.090549, 58.398927, 41.224019, 34.804777, 32.184565,\n", - " 59.392327, 9.155824, 52.967172, 67.409236],\n", - " [18.79468 , 8.543429, 35.834698, 3.596245, 30.252802, 41.549499,\n", - " 23.060248, 7.267762, 27.374099, 0.684024],\n", - " [ 9.93227 , 44.509446, 22.01927 , 28.514121, 36.715233, 15.03939 ,\n", - " 2.43399 , 0.636075, 34.43023 , 37.024545],\n", - " [28.874823, 1.514255, 34.210822, 10.49793 , 54.025491, 64.294026,\n", - " 36.212813, 17.766628, 45.295952, 10.349576],\n", - " [58.961924, 47.334638, 64.844646, 37.634131, 7.815025, 35.139303,\n", - " 18.501505, 35.01185 , 27.761908, 13.240655],\n", - " [49.544668, 57.716538, 27.389778, 11.604377, 24.826367, 15.449456,\n", - " 26.482386, 42.855739, 10.4958 , 59.267182],\n", - " [37.094093, 43.294246, 33.240747, 16.85513 , 54.705119, 14.633291,\n", - " 35.138742, 50.101683, 57.495953, 52.795405]],\n", - "\n", - " [[26.137058, 16.448086, 4.503539, 33.351036, 20.32524 , 63.369743,\n", - " 11.531512, 38.629561, 59.821441, 11.547508],\n", - " [ 6.438471, 28.948907, 9.949052, 23.234921, 65.539507, 9.822554,\n", - " 55.754023, 51.590388, 57.047098, 4.929671],\n", - " [32.360472, 17.939979, 52.889505, 69.181176, 39.2923 , 56.442225,\n", - " 5.699603, 21.092554, 8.93472 , 23.810367],\n", - " [ 3.715695, 30.653733, 44.540496, 48.578544, 24.032477, 30.339109,\n", - " 37.376636, 58.787274, 49.308994, 59.510765],\n", - " [37.991912, 50.023013, 63.711135, 26.546118, 60.561058, 36.098302,\n", - " 10.725673, 40.51609 , 47.479255, 42.710909],\n", - " [15.32887 , 15.878984, 30.914778, 25.902812, 3.815428, 35.530366,\n", - " 48.426293, 44.280075, 4.468083, 18.00032 ],\n", - " [58.286993, 20.397714, 63.124 , 69.117495, 1.32436 , 29.024715,\n", - " 31.601531, 34.307982, 7.990292, 36.159696],\n", - " [34.674335, 62.344993, 5.714717, 2.161448, 66.714977, 52.443751,\n", - " 12.791859, 63.707491, 13.697063, 9.394474],\n", - " [16.766346, 26.260123, 40.68158 , 18.695085, 64.812126, 23.151592,\n", - " 50.597642, 61.308205, 31.517123, 23.469797],\n", - " [44.670989, 17.533084, 39.034907, 32.676726, 53.275139, 48.731172,\n", - " 12.958856, 21.760335, 27.292202, 18.409063]]])
<xarray.Dataset>\n", - "Dimensions: (time: 2, x: 10, y: 10)\n", - "Coordinates:\n", - " * x (x) float64 -51.32 -51.32 -51.32 ... -51.32 -51.32 -51.32\n", - " * y (y) float64 -17.32 -17.32 -17.32 ... -17.32 -17.32 -17.32\n", - " * time (time) datetime64[ns] 2016-12-19T10:27:29.687763 2016-12-29T...\n", - " spatial_ref int64 0\n", - "Data variables:\n", - " blue (time, y, x) float64 6.611 5.581 0.3996 ... 3.491 5.056 3.368\n", - " green (time, y, x) float64 7.921 66.15 30.1 ... 21.76 27.29 18.41
array([-51.317454, -51.317426, -51.317399, -51.317371, -51.317343, -51.317316,\n", - " -51.317288, -51.31726 , -51.317233, -51.317205])
array([-17.322876, -17.322903, -17.322931, -17.322959, -17.322986, -17.323014,\n", - " -17.323042, -17.323069, -17.323097, -17.323125])
array(['2016-12-19T10:27:29.687763000', '2016-12-29T12:52:42.347451000'],\n", - " dtype='datetime64[ns]')
array(0)
array([[[6.61101706, 5.58097901, 0.39960727, 2.05280345, 5.47998484,\n", - " 4.76021916, 5.07792715, 5.57479217, 0.72668295, 5.17028805],\n", - " [4.53551623, 0.08826297, 4.22230213, 0.28919903, 3.47814709,\n", - " 3.22794508, 2.73644333, 2.82179869, 1.04221025, 1.09961647],\n", - " [0.07136352, 4.39326719, 0.49690677, 2.31192634, 1.19123023,\n", - " 5.98418893, 5.26697738, 1.14698827, 3.21918509, 0.98201077],\n", - " [5.81855223, 5.17606488, 4.89190312, 4.55714657, 4.70670585,\n", - " 4.67083516, 2.34418779, 0.4932369 , 2.70748782, 4.15661977],\n", - " [0.78457995, 3.83804728, 0.80384702, 1.58886148, 5.00269192,\n", - " 2.56579153, 5.19511563, 2.6421726 , 1.47148022, 6.85493052],\n", - " [4.40759261, 6.73412648, 3.70288412, 4.68206812, 0.49889505,\n", - " 2.93140894, 5.71395224, 6.38865828, 6.61863439, 3.26717611],\n", - " [6.17515163, 2.33172093, 6.82924574, 1.65065578, 5.53015689,\n", - " 0.16731611, 6.40662729, 4.55996818, 0.08360072, 1.23147801],\n", - " [2.43808458, 1.59368058, 5.90282702, 2.12432736, 1.8700329 ,\n", - " 4.51946233, 4.84550488, 6.21447495, 2.23777817, 4.17848811],\n", - " [5.85563919, 5.82433331, 6.00621159, 5.43410984, 3.39388501,\n", - " 6.18395763, 1.28406097, 0.02855524, 6.72670605, 1.51699291],\n", - " [0.84410255, 5.61328959, 6.97804534, 1.18377926, 2.53937552,\n", - " 1.26803824, 1.27574049, 5.10112907, 2.02022521, 2.08174614]],\n", - "...\n", - " [[6.24290664, 6.39476317, 1.30249235, 2.89507681, 0.5376643 ,\n", - " 5.01212706, 4.38006939, 1.62400749, 5.56236028, 4.30349107],\n", - " [6.74593668, 3.65303136, 5.11696455, 1.83525557, 3.51473159,\n", - " 6.52367921, 0.17986011, 2.44424775, 5.16909939, 0.58462293],\n", - " [2.11951807, 0.47905249, 4.27471358, 6.01904917, 6.34632929,\n", - " 2.85844128, 3.97688665, 4.21821752, 4.32073426, 3.9411069 ],\n", - " [2.97464873, 6.67996018, 3.16875018, 3.50716191, 2.9097115 ,\n", - " 1.35938603, 4.0230148 , 0.2588095 , 2.00762528, 5.74474576],\n", - " [2.57298212, 1.25080228, 4.65200582, 6.31095753, 3.26647688,\n", - " 3.21661254, 3.4731501 , 1.71341466, 5.95750029, 2.72901564],\n", - " [2.74334064, 4.20302139, 1.30621538, 2.84011532, 5.35145147,\n", - " 2.77467654, 2.6217532 , 5.28807657, 0.72261773, 4.08600786],\n", - " [3.1686723 , 4.0318634 , 5.77508699, 3.48776179, 6.0345218 ,\n", - " 4.72188719, 5.09301838, 0.38602444, 2.27879875, 1.5363515 ],\n", - " [4.54895876, 4.79775447, 6.3711522 , 3.69424969, 6.85534296,\n", - " 1.98775739, 4.74965416, 1.75053883, 1.80479493, 4.96653593],\n", - " [1.78138972, 1.54427475, 1.75002571, 3.37327355, 2.60425433,\n", - " 5.89401538, 3.2177233 , 5.63747771, 2.90034677, 2.09781306],\n", - " [1.84417405, 6.99496234, 2.50471667, 5.29213205, 0.1844106 ,\n", - " 4.87083388, 1.88844217, 3.49131529, 5.05570379, 3.36839469]]])
array([[[ 7.92063876, 66.15083163, 30.09611622, 30.43719706,\n", - " 57.79473407, 11.53864677, 14.42678195, 35.5931706 ,\n", - " 53.78485769, 0.44909335],\n", - " [23.80411143, 67.91034696, 18.6945325 , 30.41473977,\n", - " 68.11767437, 44.90605663, 62.31184244, 37.48504719,\n", - " 57.13433649, 7.52571983],\n", - " [36.65348061, 39.59683262, 61.07602968, 56.88309328,\n", - " 29.63561283, 64.11469949, 42.34168918, 54.7247894 ,\n", - " 31.87234425, 11.28255401],\n", - " [32.50217644, 28.0905485 , 58.39892665, 41.22401876,\n", - " 34.80477692, 32.18456541, 59.39232697, 9.15582412,\n", - " 52.96717208, 67.40923645],\n", - " [18.79468009, 8.54342894, 35.83469773, 3.59624497,\n", - " 30.25280185, 41.54949902, 23.06024792, 7.26776162,\n", - " 27.37409939, 0.68402392],\n", - " [ 9.93226996, 44.50944555, 22.01927024, 28.5141208 ,\n", - " 36.71523344, 15.03939035, 2.43398964, 0.63607503,\n", - " 34.43022974, 37.02454468],\n", - " [28.87482271, 1.51425484, 34.21082199, 10.49792965,\n", - " 54.02549098, 64.29402555, 36.21281265, 17.76662845,\n", - "...\n", - " 24.03247674, 30.33910879, 37.37663568, 58.78727396,\n", - " 49.30899357, 59.51076479],\n", - " [37.99191205, 50.0230132 , 63.71113469, 26.54611832,\n", - " 60.5610581 , 36.09830221, 10.72567322, 40.51609048,\n", - " 47.47925466, 42.71090946],\n", - " [15.32887012, 15.8789836 , 30.91477844, 25.90281227,\n", - " 3.81542846, 35.53036591, 48.42629307, 44.28007476,\n", - " 4.46808288, 18.00031979],\n", - " [58.28699332, 20.39771424, 63.12400017, 69.11749464,\n", - " 1.32435965, 29.02471509, 31.60153124, 34.30798249,\n", - " 7.99029205, 36.15969559],\n", - " [34.67433533, 62.34499312, 5.71471651, 2.16144808,\n", - " 66.71497747, 52.4437509 , 12.79185872, 63.70749094,\n", - " 13.69706259, 9.39447357],\n", - " [16.76634605, 26.26012306, 40.68158023, 18.69508482,\n", - " 64.81212607, 23.15159177, 50.59764238, 61.30820521,\n", - " 31.51712269, 23.46979658],\n", - " [44.67098863, 17.53308399, 39.03490717, 32.67672611,\n", - " 53.27513876, 48.73117231, 12.95885589, 21.760335 ,\n", - " 27.29220175, 18.40906318]]])
<xarray.DataArray 'spatial_ref' ()>\n", - "array(0)\n", - "Coordinates:\n", - " spatial_ref int64 0\n", - "Attributes:\n", - " spatial_ref: PROJCS["WGS 84 / UTM zone 22S",GEOGCS["WGS 84",DATUM["WGS_1...
array(0)
array(0)
<xarray.DataArray 'spatial_ref' ()>\n", - "array(0)\n", - "Coordinates:\n", - " spatial_ref int64 0\n", - "Attributes:\n", - " crs_wkt: GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["...\n", - " semi_major_axis: 6378137.0\n", - " semi_minor_axis: 6356752.314245179\n", - " inverse_flattening: 298.257223563\n", - " reference_ellipsoid_name: WGS 84\n", - " longitude_of_prime_meridian: 0.0\n", - " prime_meridian_name: Greenwich\n", - " geographic_crs_name: WGS 84\n", - " grid_mapping_name: latitude_longitude\n", - " spatial_ref: GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["...
array(0)
array(0)
<xarray.DataArray (y: 200, x: 200)>\n", - "array([[ nan, nan, nan, ..., 656., 656., 554.],\n", - " [ nan, nan, nan, ..., 694., 694., 642.],\n", - " [ nan, nan, nan, ..., 456., 575., 642.],\n", - " ...,\n", - " [993., 817., 817., ..., 471., 479., 498.],\n", - " [893., 893., 816., ..., 479., 479., 469.],\n", - " [816., 816., 832., ..., 515., 469., 485.]], dtype=float32)\n", - "Coordinates:\n", - " * y (y) float64 5.05e+06 5.05e+06 5.05e+06 ... 5.004e+06 5.004e+06\n", - " * x (x) float64 -7.274e+06 -7.274e+06 ... -7.228e+06 -7.228e+06\n", - "Attributes:\n", - " crs: +a=6371007.181 +b=6371007.181 +lon_0=0 +no_defs +proj=sinu +u...\n", - " res: [231.65635826 231.65635826]\n", - " is_tiled: 0\n", - " nodata: -28672.0\n", - " transform: [ 2.31656358e+02 0.00000000e+00 -7.27400965e+06 0.00000000e...
<xarray.Dataset>\n", - "Dimensions: (y: 10, x: 10, time: 2)\n", - "Coordinates:\n", - " * y (y) float64 8.085e+06 8.085e+06 ... 8.085e+06 8.085e+06\n", - " * x (x) float64 4.663e+05 4.663e+05 ... 4.663e+05 4.663e+05\n", - " * time (time) object 2016-12-19 10:27:29.687763 2016-12-29 12:52:42...\n", - " spatial_ref int64 0\n", - "Data variables:\n", - " blue (time, y, x) float64 6.611 5.581 0.3996 ... 3.491 5.056 3.368\n", - " green (time, y, x) float64 7.921 66.15 30.1 ... 21.76 27.29 18.41\n", - "Attributes:\n", - " coordinates: spatial_ref
array([[ nan, nan, nan, ..., 656., 656., 554.],\n", - " [ nan, nan, nan, ..., 694., 694., 642.],\n", - " [ nan, nan, nan, ..., 456., 575., 642.],\n", - " ...,\n", - " [993., 817., 817., ..., 471., 479., 498.],\n", - " [893., 893., 816., ..., 479., 479., 469.],\n", - " [816., 816., 832., ..., 515., 469., 485.]], dtype=float32)
array([5049992.781974, 5049761.125615, 5049529.469257, 5049297.812899,\n", - " 5049066.156541, 5048834.500182, 5048602.843824, 5048371.187466,\n", - " 5048139.531108, 5047907.874749, 5047676.218391, 5047444.562033,\n", - " 5047212.905674, 5046981.249316, 5046749.592958, 5046517.9366 ,\n", - " 5046286.280241, 5046054.623883, 5045822.967525, 5045591.311167,\n", - " 5045359.654808, 5045127.99845 , 5044896.342092, 5044664.685734,\n", - " 5044433.029375, 5044201.373017, 5043969.716659, 5043738.0603 ,\n", - " 5043506.403942, 5043274.747584, 5043043.091226, 5042811.434867,\n", - " 5042579.778509, 5042348.122151, 5042116.465793, 5041884.809434,\n", - " 5041653.153076, 5041421.496718, 5041189.84036 , 5040958.184001,\n", - " 5040726.527643, 5040494.871285, 5040263.214927, 5040031.558568,\n", - " 5039799.90221 , 5039568.245852, 5039336.589493, 5039104.933135,\n", - " 5038873.276777, 5038641.620419, 5038409.96406 , 5038178.307702,\n", - " 5037946.651344, 5037714.994986, 5037483.338627, 5037251.682269,\n", - " 5037020.025911, 5036788.369553, 5036556.713194, 5036325.056836,\n", - " 5036093.400478, 5035861.74412 , 5035630.087761, 5035398.431403,\n", - " 5035166.775045, 5034935.118686, 5034703.462328, 5034471.80597 ,\n", - " 5034240.149612, 5034008.493253, 5033776.836895, 5033545.180537,\n", - " 5033313.524179, 5033081.86782 , 5032850.211462, 5032618.555104,\n", - " 5032386.898746, 5032155.242387, 5031923.586029, 5031691.929671,\n", - " 5031460.273313, 5031228.616954, 5030996.960596, 5030765.304238,\n", - " 5030533.647879, 5030301.991521, 5030070.335163, 5029838.678805,\n", - " 5029607.022446, 5029375.366088, 5029143.70973 , 5028912.053372,\n", - " 5028680.397013, 5028448.740655, 5028217.084297, 5027985.427939,\n", - " 5027753.77158 , 5027522.115222, 5027290.458864, 5027058.802506,\n", - " 5026827.146147, 5026595.489789, 5026363.833431, 5026132.177072,\n", - " 5025900.520714, 5025668.864356, 5025437.207998, 5025205.551639,\n", - " 5024973.895281, 5024742.238923, 5024510.582565, 5024278.926206,\n", - " 5024047.269848, 5023815.61349 , 5023583.957132, 5023352.300773,\n", - " 5023120.644415, 5022888.988057, 5022657.331698, 5022425.67534 ,\n", - " 5022194.018982, 5021962.362624, 5021730.706265, 5021499.049907,\n", - " 5021267.393549, 5021035.737191, 5020804.080832, 5020572.424474,\n", - " 5020340.768116, 5020109.111758, 5019877.455399, 5019645.799041,\n", - " 5019414.142683, 5019182.486325, 5018950.829966, 5018719.173608,\n", - " 5018487.51725 , 5018255.860891, 5018024.204533, 5017792.548175,\n", - " 5017560.891817, 5017329.235458, 5017097.5791 , 5016865.922742,\n", - " 5016634.266384, 5016402.610025, 5016170.953667, 5015939.297309,\n", - " 5015707.640951, 5015475.984592, 5015244.328234, 5015012.671876,\n", - " 5014781.015518, 5014549.359159, 5014317.702801, 5014086.046443,\n", - " 5013854.390084, 5013622.733726, 5013391.077368, 5013159.42101 ,\n", - " 5012927.764651, 5012696.108293, 5012464.451935, 5012232.795577,\n", - " 5012001.139218, 5011769.48286 , 5011537.826502, 5011306.170144,\n", - " 5011074.513785, 5010842.857427, 5010611.201069, 5010379.544711,\n", - " 5010147.888352, 5009916.231994, 5009684.575636, 5009452.919277,\n", - " 5009221.262919, 5008989.606561, 5008757.950203, 5008526.293844,\n", - " 5008294.637486, 5008062.981128, 5007831.32477 , 5007599.668411,\n", - " 5007368.012053, 5007136.355695, 5006904.699337, 5006673.042978,\n", - " 5006441.38662 , 5006209.730262, 5005978.073904, 5005746.417545,\n", - " 5005514.761187, 5005283.104829, 5005051.44847 , 5004819.792112,\n", - " 5004588.135754, 
5004356.479396, 5004124.823037, 5003893.166679])
array([-7273893.821307, -7273662.164949, -7273430.508591, -7273198.852232,\n", - " -7272967.195874, -7272735.539516, -7272503.883158, -7272272.226799,\n", - " -7272040.570441, -7271808.914083, -7271577.257725, -7271345.601366,\n", - " -7271113.945008, -7270882.28865 , -7270650.632291, -7270418.975933,\n", - " -7270187.319575, -7269955.663217, -7269724.006858, -7269492.3505 ,\n", - " -7269260.694142, -7269029.037784, -7268797.381425, -7268565.725067,\n", - " -7268334.068709, -7268102.412351, -7267870.755992, -7267639.099634,\n", - " -7267407.443276, -7267175.786918, -7266944.130559, -7266712.474201,\n", - " -7266480.817843, -7266249.161484, -7266017.505126, -7265785.848768,\n", - " -7265554.19241 , -7265322.536051, -7265090.879693, -7264859.223335,\n", - " -7264627.566977, -7264395.910618, -7264164.25426 , -7263932.597902,\n", - " -7263700.941544, -7263469.285185, -7263237.628827, -7263005.972469,\n", - " -7262774.31611 , -7262542.659752, -7262311.003394, -7262079.347036,\n", - " -7261847.690677, -7261616.034319, -7261384.377961, -7261152.721603,\n", - " -7260921.065244, -7260689.408886, -7260457.752528, -7260226.09617 ,\n", - " -7259994.439811, -7259762.783453, -7259531.127095, -7259299.470737,\n", - " -7259067.814378, -7258836.15802 , -7258604.501662, -7258372.845303,\n", - " -7258141.188945, -7257909.532587, -7257677.876229, -7257446.21987 ,\n", - " -7257214.563512, -7256982.907154, -7256751.250796, -7256519.594437,\n", - " -7256287.938079, -7256056.281721, -7255824.625363, -7255592.969004,\n", - " -7255361.312646, -7255129.656288, -7254897.99993 , -7254666.343571,\n", - " -7254434.687213, -7254203.030855, -7253971.374496, -7253739.718138,\n", - " -7253508.06178 , -7253276.405422, -7253044.749063, -7252813.092705,\n", - " -7252581.436347, -7252349.779989, -7252118.12363 , -7251886.467272,\n", - " -7251654.810914, -7251423.154556, -7251191.498197, -7250959.841839,\n", - " -7250728.185481, -7250496.529122, -7250264.872764, -7250033.216406,\n", - " -7249801.560048, -7249569.903689, -7249338.247331, -7249106.590973,\n", - " -7248874.934615, -7248643.278256, -7248411.621898, -7248179.96554 ,\n", - " -7247948.309182, -7247716.652823, -7247484.996465, -7247253.340107,\n", - " -7247021.683749, -7246790.02739 , -7246558.371032, -7246326.714674,\n", - " -7246095.058315, -7245863.401957, -7245631.745599, -7245400.089241,\n", - " -7245168.432882, -7244936.776524, -7244705.120166, -7244473.463808,\n", - " -7244241.807449, -7244010.151091, -7243778.494733, -7243546.838375,\n", - " -7243315.182016, -7243083.525658, -7242851.8693 , -7242620.212942,\n", - " -7242388.556583, -7242156.900225, -7241925.243867, -7241693.587508,\n", - " -7241461.93115 , -7241230.274792, -7240998.618434, -7240766.962075,\n", - " -7240535.305717, -7240303.649359, -7240071.993001, -7239840.336642,\n", - " -7239608.680284, -7239377.023926, -7239145.367568, -7238913.711209,\n", - " -7238682.054851, -7238450.398493, -7238218.742135, -7237987.085776,\n", - " -7237755.429418, -7237523.77306 , -7237292.116701, -7237060.460343,\n", - " -7236828.803985, -7236597.147627, -7236365.491268, -7236133.83491 ,\n", - " -7235902.178552, -7235670.522194, -7235438.865835, -7235207.209477,\n", - " -7234975.553119, -7234743.896761, -7234512.240402, -7234280.584044,\n", - " -7234048.927686, -7233817.271327, -7233585.614969, -7233353.958611,\n", - " -7233122.302253, -7232890.645894, -7232658.989536, -7232427.333178,\n", - " -7232195.67682 , -7231964.020461, -7231732.364103, -7231500.707745,\n", - " -7231269.051387, -7231037.395028, -7230805.73867 , 
-7230574.082312,\n", - " -7230342.425954, -7230110.769595, -7229879.113237, -7229647.456879,\n", - " -7229415.80052 , -7229184.144162, -7228952.487804, -7228720.831446,\n", - " -7228489.175087, -7228257.518729, -7228025.862371, -7227794.206013])
[deleted example output: source xarray.Dataset repr plus its coordinate and data-variable dumps]
    Dimensions:      (time: 2, x: 10, y: 10)
    Coordinates:     spatial_ref (scalar int64 0), x = 466267.5 … 466294.5 m, y = 8084698.5 … 8084671.5 m (3 m spacing),
                     time = 2016-12-19T10:27:29.687763 / 2016-12-29T12:52:42.347451
    Data variables:  blue, green (time, y, x) float64 (10 x 10 x 2 values each)
[deleted example output: the same Dataset after reprojection to geographic coordinates, plus its coordinate and data-variable dumps]
    Dimensions:      (time: 2, x: 10, y: 10)
    Coordinates:     x = -51.3175° … -51.3172°, y = -17.3229° … -17.3231°, same two time steps, spatial_ref (scalar int64 0)
    Data variables:  blue, green (time, y, x) float64 (values unchanged from the source Dataset)
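The two Dataset reprs above (projected metres before, decimal degrees after) are the before/after of a reprojection step. Below is a minimal sketch of how such output could be produced with rioxarray; the coordinate values and the EPSG:32722 code for WGS 84 / UTM zone 22S are read off the reprs, while the constructed Dataset and random data values are illustrative assumptions, not the original example's code.

    # Minimal sketch (assumptions noted in comments): rebuild a small Dataset
    # shaped like the repr above and reproject it with rioxarray.
    import numpy as np
    import xarray as xr
    import rioxarray  # noqa: F401 -- registers the .rio accessor on xarray objects

    # Coordinates copied from the repr: 10 x 10 pixels at 3 m spacing, 2 time steps.
    x = np.linspace(466267.5, 466294.5, 10)
    y = np.linspace(8084698.5, 8084671.5, 10)
    time = np.array(
        ["2016-12-19T10:27:29.687763", "2016-12-29T12:52:42.347451"],
        dtype="datetime64[ns]",
    )

    # Illustrative random data standing in for the original 'blue'/'green' values.
    rng = np.random.default_rng(0)
    ds = xr.Dataset(
        {
            "blue": (("time", "y", "x"), rng.uniform(0, 7, (2, 10, 10))),
            "green": (("time", "y", "x"), rng.uniform(0, 70, (2, 10, 10))),
        },
        coords={"time": time, "y": y, "x": x},
    )

    # write_crs attaches the CRS as the scalar 'spatial_ref' coordinate
    # (the array(0) values shown above); EPSG:32722 is WGS 84 / UTM zone 22S.
    ds = ds.rio.write_crs("EPSG:32722")

    # reproject returns a new Dataset whose x/y are longitude/latitude degrees,
    # matching the second repr (x around -51.317, y around -17.323).
    ds_wgs84 = ds.rio.reproject("EPSG:4326")
    print(ds_wgs84)

In the original documentation example the data presumably came from an opened raster rather than a hand-built Dataset; the sketch only mirrors the structure visible in the deleted output.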
[deleted example output: scalar 'spatial_ref' DataArray (value 0) whose spatial_ref attribute holds the PROJCS["WGS 84 / UTM zone 22S", GEOGCS["WGS 84", DATUM["WGS_1..."]] WKT, followed by two scalar array(0) dumps]
[deleted example output: scalar 'spatial_ref' DataArray (value 0) with CF grid-mapping attributes for geographic WGS 84, followed by two scalar array(0) dumps]
    crs_wkt / spatial_ref = GEOGCS["WGS 84", DATUM["WGS_1984", SPHEROID["..."]]]
    semi_major_axis 6378137.0, semi_minor_axis 6356752.314245179, inverse_flattening 298.257223563
    reference_ellipsoid_name WGS 84, longitude_of_prime_meridian 0.0, prime_meridian_name Greenwich
    geographic_crs_name WGS 84, grid_mapping_name latitude_longitude
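The CF attributes in that last repr (semi_major_axis 6378137.0, inverse_flattening 298.257223563, grid_mapping_name latitude_longitude) are the standard CF encoding of geographic WGS 84. An illustrative check of that correspondence, using pyproj's CRS.to_cf() rather than anything from the original example, could look like:

    # Illustrative check: the CF attributes shown on 'spatial_ref' match the
    # CF encoding of geographic WGS 84 produced by pyproj.
    from pyproj import CRS

    cf = CRS.from_epsg(4326).to_cf()
    print(cf["grid_mapping_name"])    # latitude_longitude
    print(cf["semi_major_axis"])      # 6378137.0
    print(cf["inverse_flattening"])   # 298.257223563
    print(cf["prime_meridian_name"])  # Greenwich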