diff --git a/README.rst b/README.rst index 195e6ae..1f211f3 100644 --- a/README.rst +++ b/README.rst @@ -16,7 +16,9 @@ Many image processing applications benefit from representing images at multiple Implementation ************** -At the moment, this package generates an image pyramid by using the ``dask.array.coarsen`` (`docs `_) to apply a reducing function to contiguous, non-overlapping chunks of the input data. With this implementation, it is not possible to generate a "Gaussian" image pyramid (i.e., a sequence of images that are recursively smoothed with a Gaussian filter and then resampled) because this exceeds the capabilities of ``dask.array.coarsen``. Gaussian pyramid support might be added in the future. +The top-level function `multiscale` takes two main arguments: data to be downscaled, and a reduction function. The reduction function can use any implementation but it should (eagerly) take array data and a tuple of scale factors as inputs and return downscaled data as an output. See examples of reduction functions in `xarray_multiscale.reducers `_. + +Note that the current implementation divides the input data into *contiguous* chunks. This means that attempting to use downscaling schemes based on sliding windowed smoothing will produce edge artifacts. Future versions of this package could enable applying the reduction function to *overlapping* chunks, which would enable more elaborate downscaling routines. Usage @@ -27,10 +29,10 @@ Generate a lazy multiscale representation of a numpy array: .. code-block:: python from xarray_multiscale import multiscale - import numpy as np + from xarray_multiscale.reducers import windowed_mean data = np.arange(4) - multiscale(data, np.mean, (2,)) + multiscale(data, windowed_mean, (2,)) which returns this (a collection of DataArrays, each with decreasing size): @@ -53,7 +55,7 @@ Generate a lazy multiscale representation of an ``xarray.DataArray``: .. 
code-block:: python from xarray_multiscale import multiscale - import numpy as np + from xarray_multiscale.reducers import windowed_mean from xarray import DataArray data = np.arange(16).reshape((4,4)) @@ -61,7 +63,7 @@ Generate a lazy multiscale representation of an ``xarray.DataArray``: DataArray(np.arange(data.shape[0]), dims=('x',), attrs={'units' : 'm'})) dataarray = DataArray(data, coords) - multiscale(dataarray, np.mean, (2,2)) + multiscale(dataarray, windowed_mean, (2,2)) which returns this: diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index dc4e882..0000000 --- a/poetry.lock +++ /dev/null @@ -1,822 +0,0 @@ -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "asciitree" -version = "0.3.3" -description = "Draws ASCII trees." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "atomicwrites" -version = "1.4.0" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "attrs" -version = "21.2.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] - -[[package]] -name = "black" -version = "20.8b1" -description = "The uncompromising code formatter." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -appdirs = "*" -click = ">=7.1.2" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.6,<1" -regex = ">=2020.1.8" -toml = ">=0.10.1" -typed-ast = ">=1.4.0" -typing-extensions = ">=3.7.4" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] - -[[package]] -name = "click" -version = "8.0.1" -description = "Composable command line interface toolkit" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - -[[package]] -name = "cloudpickle" -version = "1.6.0" -description = "Extended pickling support for Python objects" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "colorama" -version = "0.4.4" -description = "Cross-platform colored terminal text." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "dask" -version = "2021.6.2" -description = "Parallel PyData with Task Scheduling" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -cloudpickle = ">=1.1.1" -fsspec = ">=0.6.0" -partd = ">=0.3.10" -pyyaml = "*" -toolz = ">=0.8.2" - -[package.extras] -array = ["numpy (>=1.16)"] -complete = ["bokeh (>=1.0.0,!=2.0.0)", "distributed (==2021.06.2)", "numpy (>=1.16)", "pandas (>=0.25.0)"] -dataframe = ["numpy (>=1.16)", "pandas (>=0.25.0)"] -diagnostics = ["bokeh (>=1.0.0,!=2.0.0)"] -distributed = ["distributed (==2021.06.2)"] -test = ["pytest", "pytest-rerunfailures", "pytest-xdist"] - -[[package]] -name = "fasteners" -version = "0.16.3" -description = "A python package that provides useful locks." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -six = "*" - -[[package]] -name = "fsspec" -version = "2021.6.1" -description = "File-system specification" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -dask = ["dask", "distributed"] -dropbox = ["dropboxdrivefs", "requests", "dropbox"] -entrypoints = ["importlib-metadata"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -hdfs = ["pyarrow (>=1)"] -http = ["requests", "aiohttp"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] - -[[package]] -name = "importlib-metadata" -version = "4.6.1" -description = "Read metadata from Python packages" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -perf = ["ipython"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", 
"pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] - -[[package]] -name = "locket" -version = "0.2.1" -description = "File-based locks for Python for Linux and Windows" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "more-itertools" -version = "8.8.0" -description = "More routines for operating on iterables, beyond itertools" -category = "dev" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "mypy" -version = "0.790" -description = "Optional static typing for Python" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -mypy-extensions = ">=0.4.3,<0.5.0" -typed-ast = ">=1.4.0,<1.5.0" -typing-extensions = ">=3.7.4" - -[package.extras] -dmypy = ["psutil (>=4.0)"] - -[[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "numcodecs" -version = "0.8.0" -description = "A Python package providing buffer compression and transformation codecs for use in data storage and communication applications." -category = "dev" -optional = false -python-versions = ">=3.6, <4" - -[package.dependencies] -numpy = ">=1.7" - -[package.extras] -msgpack = ["msgpack"] - -[[package]] -name = "numpy" -version = "1.21.0" -description = "NumPy is the fundamental package for array computing with Python." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "packaging" -version = "21.0" -description = "Core utilities for Python packages" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2" - -[[package]] -name = "pandas" -version = "1.3.0" -description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" -optional = false -python-versions = ">=3.7.1" - -[package.dependencies] -numpy = ">=1.17.3" -python-dateutil = ">=2.7.3" -pytz = ">=2017.3" - -[package.extras] -test = ["hypothesis (>=3.58)", "pytest (>=6.0)", "pytest-xdist"] - -[[package]] -name = "partd" -version = "1.2.0" -description = "Appendable key-value storage" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -locket = "*" -toolz = "*" - -[package.extras] -complete = ["numpy (>=1.9.0)", "pandas (>=0.19.0)", "pyzmq", "blosc"] - -[[package]] -name = "pathspec" -version = "0.8.1" -description = "Utility library for gitignore style pattern matching of file paths." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "pluggy" -version = "0.13.1" -description = "plugin and hook calling mechanisms for python" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - -[package.extras] -dev = ["pre-commit", "tox"] - -[[package]] -name = "py" -version = "1.10.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pyparsing" -version = "2.4.7" -description = "Python parsing module" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "pytest" -version = "5.4.3" -description = "pytest: simple powerful testing with Python" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=17.4.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -more-itertools = ">=4.0.0" -packaging = "*" -pluggy = ">=0.12,<1.0" -py = ">=1.5.0" -wcwidth = "*" - -[package.extras] -checkqa-mypy = ["mypy (==v0.761)"] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] - -[[package]] -name = "python-dateutil" -version = "2.8.1" -description = "Extensions to the standard Python datetime module" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytz" -version = "2021.1" -description = "World timezone definitions, modern and historical" -category = "main" -optional = false 
-python-versions = "*" - -[[package]] -name = "pyyaml" -version = "5.4.1" -description = "YAML parser and emitter for Python" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[[package]] -name = "regex" -version = "2021.7.6" -description = "Alternative regular expression module, to replace re." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "scipy" -version = "1.6.1" -description = "SciPy: Scientific Library for Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -numpy = ">=1.16.5" - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "toolz" -version = "0.11.1" -description = "List processing tools and functional utilities" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "typed-ast" -version = "1.4.3" -description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "typing-extensions" -version = "3.10.0.0" -description = "Backported and Experimental Type Hints for Python 3.5+" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "wcwidth" -version = "0.2.5" -description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "xarray" -version = "0.18.2" -description = "N-D labeled arrays and datasets in Python" -category = "main" -optional = false -python-versions = ">=3.7" - 
-[package.dependencies] -numpy = ">=1.17" -pandas = ">=1.0" - -[package.extras] -accel = ["scipy", "bottleneck", "numbagg"] -complete = ["netcdf4", "h5netcdf", "scipy", "pydap", "zarr", "fsspec", "cftime", "rasterio", "cfgrib", "pooch", "bottleneck", "numbagg", "dask", "matplotlib", "seaborn", "nc-time-axis"] -docs = ["netcdf4", "h5netcdf", "scipy", "pydap", "zarr", "fsspec", "cftime", "rasterio", "cfgrib", "pooch", "bottleneck", "numbagg", "dask", "matplotlib", "seaborn", "nc-time-axis", "sphinx-autosummary-accessors", "sphinx-rtd-theme", "ipython", "ipykernel", "jupyter-client", "nbsphinx", "scanpydoc"] -io = ["netcdf4", "h5netcdf", "scipy", "pydap", "zarr", "fsspec", "cftime", "rasterio", "cfgrib", "pooch"] -parallel = ["dask"] -viz = ["matplotlib", "seaborn", "nc-time-axis"] - -[[package]] -name = "zarr" -version = "2.8.3" -description = "An implementation of chunked, compressed, N-dimensional arrays for Python." -category = "dev" -optional = false -python-versions = ">=3.6, <4" - -[package.dependencies] -asciitree = "*" -fasteners = "*" -numcodecs = ">=0.6.4" -numpy = ">=1.7" - -[package.extras] -jupyter = ["notebook", "ipytree"] - -[[package]] -name = "zipp" -version = "3.5.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] - -[metadata] -lock-version = "1.1" -python-versions = ">=3.7.1,<4" -content-hash = "d7bc9805ed3b0a8ae4c856ae419890cf4216d6f9e6e1ed2dc2b8e779ad6c426b" - -[metadata.files] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = 
"sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -asciitree = [ - {file = "asciitree-0.3.3.tar.gz", hash = "sha256:4aa4b9b649f85e3fcb343363d97564aa1fb62e249677f2e18a96765145cc0f6e"}, -] -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, -] -black = [ - {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, -] -click = [ - {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, - {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, -] -cloudpickle = [ - {file = "cloudpickle-1.6.0-py3-none-any.whl", hash = "sha256:3a32d0eb0bc6f4d0c57fbc4f3e3780f7a81e6fee0fa935072884d58ae8e1cc7c"}, - {file = "cloudpickle-1.6.0.tar.gz", hash = "sha256:9bc994f9e9447593bd0a45371f0e7ac7333710fcf64a4eb9834bf149f4ef2f32"}, -] -colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -dask = [ - {file = "dask-2021.6.2-py3-none-any.whl", hash = "sha256:1f18d0815154b938a529ac3081c8952998d709319e57bbc484b42f0094217d43"}, - {file = "dask-2021.6.2.tar.gz", hash = "sha256:8588fcd1a42224b7cfcd2ebc8ad616734abb6b1a4517efd52d89c7dd66eb91f8"}, -] -fasteners = [ - {file = 
"fasteners-0.16.3-py2.py3-none-any.whl", hash = "sha256:8408e52656455977053871990bd25824d85803b9417aa348f10ba29ef0c751f7"}, - {file = "fasteners-0.16.3.tar.gz", hash = "sha256:b1ab4e5adfbc28681ce44b3024421c4f567e705cc3963c732bf1cba3348307de"}, -] -fsspec = [ - {file = "fsspec-2021.6.1-py3-none-any.whl", hash = "sha256:0ca23992f425c1ba61bf11d3cb3af8ad5363be8612e26732b520090556f173f2"}, - {file = "fsspec-2021.6.1.tar.gz", hash = "sha256:2cdaafb51dd71e062afffcabcbc4925acef95f9bdd8d822d2010e4bf92951bd7"}, -] -importlib-metadata = [ - {file = "importlib_metadata-4.6.1-py3-none-any.whl", hash = "sha256:9f55f560e116f8643ecf2922d9cd3e1c7e8d52e683178fecd9d08f6aa357e11e"}, - {file = "importlib_metadata-4.6.1.tar.gz", hash = "sha256:079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac"}, -] -locket = [ - {file = "locket-0.2.1-py2.py3-none-any.whl", hash = "sha256:12b6ada59d1f50710bca9704dbadd3f447dbf8dac6664575c1281cadab8e6449"}, - {file = "locket-0.2.1.tar.gz", hash = "sha256:3e1faba403619fe201552f083f1ecbf23f550941bc51985ac6ed4d02d25056dd"}, -] -more-itertools = [ - {file = "more-itertools-8.8.0.tar.gz", hash = "sha256:83f0308e05477c68f56ea3a888172c78ed5d5b3c282addb67508e7ba6c8f813a"}, - {file = "more_itertools-8.8.0-py3-none-any.whl", hash = "sha256:2cf89ec599962f2ddc4d568a05defc40e0a587fbc10d5989713638864c36be4d"}, -] -mypy = [ - {file = "mypy-0.790-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:bd03b3cf666bff8d710d633d1c56ab7facbdc204d567715cb3b9f85c6e94f669"}, - {file = "mypy-0.790-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:2170492030f6faa537647d29945786d297e4862765f0b4ac5930ff62e300d802"}, - {file = "mypy-0.790-cp35-cp35m-win_amd64.whl", hash = "sha256:e86bdace26c5fe9cf8cb735e7cedfe7850ad92b327ac5d797c656717d2ca66de"}, - {file = "mypy-0.790-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e97e9c13d67fbe524be17e4d8025d51a7dca38f90de2e462243ab8ed8a9178d1"}, - {file = "mypy-0.790-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:0d34d6b122597d48a36d6c59e35341f410d4abfa771d96d04ae2c468dd201abc"}, - {file = "mypy-0.790-cp36-cp36m-win_amd64.whl", hash = "sha256:72060bf64f290fb629bd4a67c707a66fd88ca26e413a91384b18db3876e57ed7"}, - {file = "mypy-0.790-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:eea260feb1830a627fb526d22fbb426b750d9f5a47b624e8d5e7e004359b219c"}, - {file = "mypy-0.790-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c614194e01c85bb2e551c421397e49afb2872c88b5830e3554f0519f9fb1c178"}, - {file = "mypy-0.790-cp37-cp37m-win_amd64.whl", hash = "sha256:0a0d102247c16ce93c97066443d11e2d36e6cc2a32d8ccc1f705268970479324"}, - {file = "mypy-0.790-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cf4e7bf7f1214826cf7333627cb2547c0db7e3078723227820d0a2490f117a01"}, - {file = "mypy-0.790-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:af4e9ff1834e565f1baa74ccf7ae2564ae38c8df2a85b057af1dbbc958eb6666"}, - {file = "mypy-0.790-cp38-cp38-win_amd64.whl", hash = "sha256:da56dedcd7cd502ccd3c5dddc656cb36113dd793ad466e894574125945653cea"}, - {file = "mypy-0.790-py3-none-any.whl", hash = "sha256:2842d4fbd1b12ab422346376aad03ff5d0805b706102e475e962370f874a5122"}, - {file = "mypy-0.790.tar.gz", hash = "sha256:2b21ba45ad9ef2e2eb88ce4aeadd0112d0f5026418324176fd494a6824b74975"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -numcodecs = [ - {file = "numcodecs-0.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7a88075fa31b353dea5530bf7d0a358aca93f57aecb62edca13ea142532dfcd4"}, - {file = "numcodecs-0.8.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:d41312116974845e21c942241520d951b88f3b53882695dd16650dc3ed9bd82b"}, - {file = "numcodecs-0.8.0-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:e602cab14c9e4e0bf1563b0f44f115a64fddfb0b6b52fc83de1746d7cdfd69ff"}, - {file = "numcodecs-0.8.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:019861b72c5afab4732d96ba6b53d1b56ed14eb8d810bca4909e71b5c58ddece"}, - {file = "numcodecs-0.8.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:ac07c7c5dd7a4ed4fbee5cb79b0acbdc6474c3b3280c7cdb97dc1274ca438feb"}, - {file = "numcodecs-0.8.0-cp36-cp36m-win32.whl", hash = "sha256:972955f1d6d650e7e4efd29fbe7697050e56b3f04fb2f58de13faec5bc19365f"}, - {file = "numcodecs-0.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:313c07960eade9169454ba1dae55973c2131ada45c3eaf1c572d4677e3804f14"}, - {file = "numcodecs-0.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ceb27f735cb16e2f0516bcb92c52aaa14ed2a54e11f994b0232596d19ad41b8"}, - {file = "numcodecs-0.8.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b6c0132bcf5e232f9b70c2dc8c6b6e0248c380fda46d0979252b94168b31a3a6"}, - {file = "numcodecs-0.8.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:88b5faf32db97f7295e71a87d734cb0bf1cc441ab63b73cb739f0c096a765d5d"}, - {file = "numcodecs-0.8.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:a5d881290ec51da96e0a903e0669bad09b7d9ac8be05810e86770fb5b742a53b"}, - {file = "numcodecs-0.8.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:a55883a6349f827fc87fab87b7ad0b27752801a6fb5a6c0b518d1c5e59ba3c54"}, - {file = "numcodecs-0.8.0-cp37-cp37m-win32.whl", hash = "sha256:c16fc74473cfff5a3a838884b2318216afaeeb61360765c76082c421d0d4587f"}, - {file = "numcodecs-0.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3154e4b85ed20b4741fb75a5d58aba7b2c5f8a5b7096c74d106201bdb3545e7d"}, - {file = "numcodecs-0.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:877bc1b105022481fe660371d1fff22e78c4950d67551a610527f1b20011910c"}, - {file = "numcodecs-0.8.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:d3b9aa1a7ccc09a687b0bd4ff2dae55067c4787001db539987ba4052dc3cd1d8"}, - {file = "numcodecs-0.8.0-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:9549d59986df8f43c40b01a6c671f3fca4a3bf28c3fa7158ef9b6bf904a40f15"}, - {file = "numcodecs-0.8.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:676bfe0f5ff7f9bd66ea2188660584cee2d04e285033dce599cb9538a51e3b88"}, - {file = "numcodecs-0.8.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d6d171f5d924b27a783d66ddaeb5ab7b1d15365705820745d48df43fbe3b4c3d"}, - {file = "numcodecs-0.8.0-cp38-cp38-win32.whl", hash = "sha256:568589d985c2137a4825ddcd1b286d383b4bc6b6fac846e3f09569aaf61ba5ac"}, - {file = "numcodecs-0.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:ea760153a0304748394500b189e6c072cbfc54c809322c4ff9fa85b300790094"}, - {file = "numcodecs-0.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:71536798011177b9d8a00dec721cecb9110cd20ebe98162f14a395c2a2b45c89"}, - {file = "numcodecs-0.8.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:1f01309661b605a285b64d1cb98ff4acf4b45f76559bb34b5b3bbfe921eef71d"}, - {file = "numcodecs-0.8.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f3e177d9d35a4615fe09891548923fd22eabdd716ada28eb2e19075a1793d831"}, - {file = "numcodecs-0.8.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0fb5a4c1144840f86d2930baac4b7678c41d2d46f8dc71f99aff77772a445fa"}, - {file = "numcodecs-0.8.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:562755b5db20588049c7ff60eeb8fbfb1686a3639fe0a3cb55a6c49389ed5b94"}, - {file = "numcodecs-0.8.0-cp39-cp39-win32.whl", hash = "sha256:c9fce99dc72a1b081501f89a7a0c393cde362bd562f5204bac818800e337392b"}, - {file = "numcodecs-0.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:d44b2882c6861d1dbcf5dd4c7c943848929cf48dda8944fc1ef3ebebe993efa3"}, - {file = "numcodecs-0.8.0.tar.gz", hash = "sha256:7c7d0ea56b5e2a267ae785bdce47abed62829ef000f03be8e32e30df62d3749c"}, -] -numpy = [ - {file = "numpy-1.21.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d5caa946a9f55511e76446e170bdad1d12d6b54e17a2afe7b189112ed4412bb8"}, - {file = "numpy-1.21.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:ac4fd578322842dbda8d968e3962e9f22e862b6ec6e3378e7415625915e2da4d"}, - {file = "numpy-1.21.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:598fe100b2948465cf3ed64b1a326424b5e4be2670552066e17dfaa67246011d"}, - {file = "numpy-1.21.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c55407f739f0bfcec67d0df49103f9333edc870061358ac8a8c9e37ea02fcd2"}, - {file = "numpy-1.21.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:75579acbadbf74e3afd1153da6177f846212ea2a0cc77de53523ae02c9256513"}, - {file = "numpy-1.21.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cc367c86eb87e5b7c9592935620f22d13b090c609f1b27e49600cd033b529f54"}, - {file = "numpy-1.21.0-cp37-cp37m-win32.whl", hash = "sha256:d89b0dc7f005090e32bb4f9bf796e1dcca6b52243caf1803fdd2b748d8561f63"}, - {file = "numpy-1.21.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eda2829af498946c59d8585a9fd74da3f810866e05f8df03a86f70079c7531dd"}, - {file = "numpy-1.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1a784e8ff7ea2a32e393cc53eb0003eca1597c7ca628227e34ce34eb11645a0e"}, - {file = "numpy-1.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bba474a87496d96e61461f7306fba2ebba127bed7836212c360f144d1e72ac54"}, - {file = "numpy-1.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd0a359c1c17f00cb37de2969984a74320970e0ceef4808c32e00773b06649d9"}, - {file = "numpy-1.21.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e4d5a86a5257843a18fb1220c5f1c199532bc5d24e849ed4b0289fb59fbd4d8f"}, - {file = "numpy-1.21.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:620732f42259eb2c4642761bd324462a01cdd13dd111740ce3d344992dd8492f"}, - {file = "numpy-1.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9205711e5440954f861ceeea8f1b415d7dd15214add2e878b4d1cf2bcb1a914"}, - {file = "numpy-1.21.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:ad09f55cc95ed8d80d8ab2052f78cc21cb231764de73e229140d81ff49d8145e"}, - {file = "numpy-1.21.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a1f2fb2da242568af0271455b89aee0f71e4e032086ee2b4c5098945d0e11cf6"}, - {file = "numpy-1.21.0-cp38-cp38-win32.whl", hash = "sha256:e58ddb53a7b4959932f5582ac455ff90dcb05fac3f8dcc8079498d43afbbde6c"}, - {file = "numpy-1.21.0-cp38-cp38-win_amd64.whl", hash = "sha256:d2910d0a075caed95de1a605df00ee03b599de5419d0b95d55342e9a33ad1fb3"}, - {file = "numpy-1.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a290989cd671cd0605e9c91a70e6df660f73ae87484218e8285c6522d29f6e38"}, - {file = "numpy-1.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3537b967b350ad17633b35c2f4b1a1bbd258c018910b518c30b48c8e41272717"}, - {file = "numpy-1.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc6c650f8700ce1e3a77668bb7c43e45c20ac06ae00d22bdf6760b38958c883"}, - {file = "numpy-1.21.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:709884863def34d72b183d074d8ba5cfe042bc3ff8898f1ffad0209161caaa99"}, - {file = "numpy-1.21.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bebab3eaf0641bba26039fb0b2c5bf9b99407924b53b1ea86e03c32c64ef5aef"}, - {file = "numpy-1.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf680682ad0a3bef56dae200dbcbac2d57294a73e5b0f9864955e7dd7c2c2491"}, - {file = "numpy-1.21.0-cp39-cp39-win32.whl", hash = "sha256:d95d16204cd51ff1a1c8d5f9958ce90ae190be81d348b514f9be39f878b8044a"}, - {file = "numpy-1.21.0-cp39-cp39-win_amd64.whl", hash = "sha256:2ba579dde0563f47021dcd652253103d6fd66165b18011dce1a0609215b2791e"}, - {file = "numpy-1.21.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3c40e6b860220ed862e8097b8f81c9af6d7405b723f4a7af24a267b46f90e461"}, - {file = "numpy-1.21.0.zip", hash = "sha256:e80fe25cba41c124d04c662f33f6364909b985f2eb5998aaa5ae4b9587242cce"}, -] -packaging = [ - 
{file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, - {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, -] -pandas = [ - {file = "pandas-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c81b8d91e9ae861eb4406b4e0f8d4dabbc105b9c479b3d1e921fba1d35b5b62a"}, - {file = "pandas-1.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08eeff3da6a188e24db7f292b39a8ca9e073bf841fbbeadb946b3ad5c19d843e"}, - {file = "pandas-1.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:88864c1e28353b958b1f30e4193818519624ad9a1776921622a6a2a016d5d807"}, - {file = "pandas-1.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:872aa91e0f9ca913046ab639d4181a899f5e592030d954d28c2529b88756a736"}, - {file = "pandas-1.3.0-cp37-cp37m-win32.whl", hash = "sha256:92835113a67cbd34747c198d41f09f4b63f6fe11ca5643baebc7ab1e30e89e95"}, - {file = "pandas-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:7d3cd2c99faa94d717ca00ea489264a291ad7209453dffbf059bfb7971fd3a61"}, - {file = "pandas-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:823737830364d0e2af8c3912a28ba971296181a07950873492ed94e12d28c405"}, - {file = "pandas-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c746876cdd8380be0c3e70966d4566855901ac9aaa5e4b9ccaa5ca5311457d11"}, - {file = "pandas-1.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe7a549d10ca534797095586883a5c17d140d606747591258869c56e14d1b457"}, - {file = "pandas-1.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f058c786e7b0a9e7fa5e0b9f4422e0ccdd3bf3aa3053c18d77ed2a459bd9a45a"}, - {file = "pandas-1.3.0-cp38-cp38-win32.whl", hash = "sha256:98efc2d4983d5bb47662fe2d97b2c81b91566cb08b266490918b9c7d74a5ef64"}, - {file = "pandas-1.3.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:e6b75091fa54a53db3927b4d1bc997c23c5ba6f87acdfe1ee5a92c38c6b2ed6a"}, - {file = "pandas-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1ff13eed501e07e7fb26a4ea18a846b6e5d7de549b497025601fd9ccb7c1d123"}, - {file = "pandas-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:798675317d0e4863a92a9a6bc5bd2490b5f6fef8c17b95f29e2e33f28bef9eca"}, - {file = "pandas-1.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed4fc66f23fe17c93a5d439230ca2d6b5f8eac7154198d327dbe8a16d98f3f10"}, - {file = "pandas-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:522bfea92f3ef6207cadc7428bda1e7605dae0383b8065030e7b5d0266717b48"}, - {file = "pandas-1.3.0-cp39-cp39-win32.whl", hash = "sha256:7897326cae660eee69d501cbfa950281a193fcf407393965e1bc07448e1cc35a"}, - {file = "pandas-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b10d7910ae9d7920a5ff7816d794d99acbc361f7b16a0f017d4fa83ced8cb55e"}, - {file = "pandas-1.3.0.tar.gz", hash = "sha256:c554e6c9cf2d5ea1aba5979cc837b3649539ced0e18ece186f055450c86622e2"}, -] -partd = [ - {file = "partd-1.2.0-py3-none-any.whl", hash = "sha256:5c3a5d70da89485c27916328dc1e26232d0e270771bd4caef4a5124b6a457288"}, - {file = "partd-1.2.0.tar.gz", hash = "sha256:aa67897b84d522dcbc86a98b942afab8c6aa2f7f677d904a616b74ef5ddbc3eb"}, -] -pathspec = [ - {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, - {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, -] -pluggy = [ - {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, - {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, -] -py = [ - {file = "py-1.10.0-py2.py3-none-any.whl", hash = 
"sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, - {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, -] -pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, -] -pytest = [ - {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, - {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"}, -] -python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, -] -pytz = [ - {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, - {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, -] -pyyaml = [ - {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, - {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, - {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, - {file = 
"PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, - {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, - {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, - {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, - {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, - {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, - {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, - {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, - {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, -] -regex = [ - {file = "regex-2021.7.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407"}, - {file = "regex-2021.7.6-cp36-cp36m-win32.whl", hash = "sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b"}, - {file = "regex-2021.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb"}, - {file = "regex-2021.7.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d"}, - {file = 
"regex-2021.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895"}, - {file = "regex-2021.7.6-cp37-cp37m-win32.whl", hash = "sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5"}, - {file = "regex-2021.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f"}, - {file = "regex-2021.7.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2014_i686.whl", hash = 
"sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068"}, - {file = "regex-2021.7.6-cp38-cp38-win32.whl", hash = "sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0"}, - {file = "regex-2021.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4"}, - {file = "regex-2021.7.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3"}, - {file = "regex-2021.7.6-cp39-cp39-win32.whl", hash = "sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035"}, - {file = 
"regex-2021.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c"}, - {file = "regex-2021.7.6.tar.gz", hash = "sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d"}, -] -scipy = [ - {file = "scipy-1.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a15a1f3fc0abff33e792d6049161b7795909b40b97c6cc2934ed54384017ab76"}, - {file = "scipy-1.6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:e79570979ccdc3d165456dd62041d9556fb9733b86b4b6d818af7a0afc15f092"}, - {file = "scipy-1.6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a423533c55fec61456dedee7b6ee7dce0bb6bfa395424ea374d25afa262be261"}, - {file = "scipy-1.6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:33d6b7df40d197bdd3049d64e8e680227151673465e5d85723b3b8f6b15a6ced"}, - {file = "scipy-1.6.1-cp37-cp37m-win32.whl", hash = "sha256:6725e3fbb47da428794f243864f2297462e9ee448297c93ed1dcbc44335feb78"}, - {file = "scipy-1.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:5fa9c6530b1661f1370bcd332a1e62ca7881785cc0f80c0d559b636567fab63c"}, - {file = "scipy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd50daf727f7c195e26f27467c85ce653d41df4358a25b32434a50d8870fc519"}, - {file = "scipy-1.6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f46dd15335e8a320b0fb4685f58b7471702234cba8bb3442b69a3e1dc329c345"}, - {file = "scipy-1.6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0e5b0ccf63155d90da576edd2768b66fb276446c371b73841e3503be1d63fb5d"}, - {file = "scipy-1.6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2481efbb3740977e3c831edfd0bd9867be26387cacf24eb5e366a6a374d3d00d"}, - {file = "scipy-1.6.1-cp38-cp38-win32.whl", hash = "sha256:68cb4c424112cd4be886b4d979c5497fba190714085f46b8ae67a5e4416c32b4"}, - {file = "scipy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:5f331eeed0297232d2e6eea51b54e8278ed8bb10b099f69c44e2558c090d06bf"}, - {file = "scipy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:0c8a51d33556bf70367452d4d601d1742c0e806cd0194785914daf19775f0e67"}, - {file = "scipy-1.6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:83bf7c16245c15bc58ee76c5418e46ea1811edcc2e2b03041b804e46084ab627"}, - {file = "scipy-1.6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:794e768cc5f779736593046c9714e0f3a5940bc6dcc1dba885ad64cbfb28e9f0"}, - {file = "scipy-1.6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5da5471aed911fe7e52b86bf9ea32fb55ae93e2f0fac66c32e58897cfb02fa07"}, - {file = "scipy-1.6.1-cp39-cp39-win32.whl", hash = "sha256:8e403a337749ed40af60e537cc4d4c03febddcc56cd26e774c9b1b600a70d3e4"}, - {file = "scipy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a5193a098ae9f29af283dcf0041f762601faf2e595c0db1da929875b7570353f"}, - {file = "scipy-1.6.1.tar.gz", hash = "sha256:c4fceb864890b6168e79b0e714c585dbe2fd4222768ee90bc1aa0f8218691b11"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -toolz = [ - {file = "toolz-0.11.1-py3-none-any.whl", hash = "sha256:1bc473acbf1a1db4e72a1ce587be347450e8f08324908b8a266b486f408f04d5"}, - {file = "toolz-0.11.1.tar.gz", hash = "sha256:c7a47921f07822fe534fb1c01c9931ab335a4390c782bd28c6bcc7c2f71f3fbf"}, -] -typed-ast = [ - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, - {file = 
"typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, - {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, - {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, - {file = 
"typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, - {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, - {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, - {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, - {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, - {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, - {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, - {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, -] -typing-extensions = [ - {file = 
"typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, - {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, - {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, -] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] -xarray = [ - {file = "xarray-0.18.2-py3-none-any.whl", hash = "sha256:a0b14b888b90a3ad8ecec5db9862f3fd02f8c0cc15066a3429d37da62fc37219"}, - {file = "xarray-0.18.2.tar.gz", hash = "sha256:5d2e72a228286fcf60f66e16876bd27629a1a70bf64822c565f16515c4d10284"}, -] -zarr = [ - {file = "zarr-2.8.3-py3-none-any.whl", hash = "sha256:9357fb40ce0bffdf81b9aac912c83829618ea367b7898f5a150eecde4cfa1338"}, - {file = "zarr-2.8.3.tar.gz", hash = "sha256:8aece33269ba3ee2af9320aa528d5fe93f76c30e4ad7fdbfb604b1db3f0d779f"}, -] -zipp = [ - {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"}, - {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"}, -] diff --git a/pyproject.toml b/pyproject.toml index 7fd9245..bd69e80 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,13 +10,17 @@ xarray = ">=0.17" mypy = "^0.790" scipy = "^1.5.4" numpy = "^1.19.4" -dask = "^^2020.12.0" +dask = "^2021.9.1" +Jinja2 = "^3.0.3" [tool.poetry.dev-dependencies] pytest = "^5.2" black = "^20.8b1" -zarr = "^2.6.1" +flake8 = "^4.0.1" [build-system] requires = ["poetry>=0.12"] build-backend = "poetry.masonry.api" + +[mypy] +plugins = "numpy.typing.mypy_plugin" \ No newline at end of file diff --git 
a/src/xarray_multiscale/multiscale.py b/src/xarray_multiscale/multiscale.py index bcc5cc8..79a3194 100644 --- a/src/xarray_multiscale/multiscale.py +++ b/src/xarray_multiscale/multiscale.py @@ -1,19 +1,41 @@ +from dask.base import tokenize import numpy as np import dask.array as da import xarray from xarray import DataArray -from typing import Any, List, Optional, Tuple, Union, Sequence, Callable, Dict -from scipy.interpolate import interp1d -from dask.array import coarsen +from typing import ( + Any, + Hashable, + List, + Literal, + Optional, + Tuple, + Union, + Sequence, + Callable, + Dict, + cast, +) +from dask.utils import apply +from dask.core import flatten +from dask.array.routines import aligned_coarsen_chunks +from dask.highlevelgraph import HighLevelGraph +from dask.array import Array +from numpy.typing import NDArray +from xarray.core.utils import is_dict_like + +CHUNK_MODES = ("rechunk", "minimum") def multiscale( array: Any, - reduction: Callable[[Any], Any], + reduction: Callable[[NDArray[Any], Tuple[int, ...], Dict[Any, Any]], NDArray[Any]], scale_factors: Union[Sequence[int], int], - pad_mode: Optional[str] = None, + depth: int = -1, + pad_mode: str = "crop", preserve_dtype: bool = True, - chunks: Optional[Union[Sequence[int], Dict[str, int]]] = None, + chunks: Optional[Union[Sequence[int], Dict[Hashable, int]]] = None, + chunk_mode: Literal["rechunk", "minimum"] = "minimum", chained: bool = True, ) -> List[DataArray]: """ @@ -25,11 +47,20 @@ def multiscale( The array to be downscaled reduction : callable - A function that aggregates chunks of data over windows. See the documentation of `dask.array.coarsen` for the expected + A function that aggregates chunks of data over windows. + See the documentation of `dask.array.coarsen` for the expected signature of this callable. - scale_factors : iterable of ints + scale_factors : int or sequence of ints The desired downscaling factors, one for each axis. 
+ If a single int is provided, it will be broadcast to all axes. + + depth : int, default=-1 + This value determines the number of downscaled arrays to return + using python indexing semantics. The default value of depth is -1, + which returns all elements from the multiscale collection. + Setting depth to a non-negative integer will return a total of + depth + 1 downscaled arrays. pad_mode : string or None, default=None How arrays should be padded prior to downscaling in order to ensure that each array dimension @@ -39,10 +70,17 @@ preserve_dtype : bool, default=True Determines whether the multiresolution arrays are all cast to the same dtype as the input. - chunks : sequence or dict of ints, or None, default=None. + chunks : sequence or dict of ints, or None, default=None If `chunks` is supplied, all output arrays are returned with this chunking. If not None, this argument is passed directly to the `xarray.DataArray.chunk` method of each output array. + chunk_mode : str, default='minimum' + `chunk_mode` determines how to interpret the `chunks` keyword argument. + With the value `rechunk`, all output arrays are rechunked + to the chunk size specified in `chunks`. + If `chunk_mode` is set to `minimum` (the default), output arrays are rechunked + only if that array has a chunk size smaller than `chunks`. + chained : bool, default=True If True (default), the nth downscaled array is generated by applying the reduction function on the n-1th downscaled array with the user-supplied `scale_factors`.
This means that the nth downscaled array directly depends on the n-1th @@ -60,56 +98,74 @@ def multiscale( """ - needs_padding = pad_mode != None - if isinstance(scale_factors, int): - scale_factors = (scale_factors,) * array.ndim - else: - assert len(scale_factors) == array.ndim + if chunk_mode not in CHUNK_MODES: + raise ValueError(f"chunk_mode must be one of {CHUNK_MODES}, not {chunk_mode}") - if pad_mode is None: - # with pad_mode set to "none", dask will trim the data such that it can be tiled - # by the scale factors - padded_shape = np.subtract(array.shape, np.mod(array.shape, scale_factors)) - else: - padded_shape = prepad(array, scale_factors, pad_mode=pad_mode).shape + scale_factors = broadcast_to_rank(scale_factors, array.ndim) + normalized = normalize_array(array, scale_factors, pad_mode=None) + needs_padding = not (pad_mode == "crop") - levels = range( - 0, 1 + get_downscale_depth(padded_shape, scale_factors, pad=needs_padding) + all_levels = tuple( + range( + 0, + 1 + get_downscale_depth(normalized.shape, scale_factors, pad=needs_padding), + ) ) - scales = tuple(tuple(s ** l for s in scale_factors) for l in levels) - result = [_ingest_array(array, scales[0])] - for level in levels[1:]: - if chained: - scale = scale_factors - downscaled = downscale(result[-1], reduction, scale, pad_mode=pad_mode) - else: - scale = scales[level] - downscaled = downscale(result[0], reduction, scale, pad_mode=pad_mode) - result.append(downscaled) + if depth < 0: + + indexer = slice(len(all_levels) + depth + 1) + else: + indexer = slice(depth + 1) + + levels = all_levels[indexer] + scales = tuple(tuple(s ** level for s in scale_factors) for level in levels) + result = [normalized] + + if len(levels) > 1: + for level in levels[1:]: + if chained: + scale = scale_factors + source = result[-1] + else: + scale = scales[level] + source = result[0] + downscaled = downscale(source, reduction, scale, pad_mode=pad_mode) + result.append(downscaled) if preserve_dtype: result = 
[r.astype(array.dtype) for r in result] if chunks is not None: - if isinstance(chunks, Sequence): - _chunks = {k: v for k, v in zip(result[0].dims, chunks)} + if isinstance(chunks, int): + new_chunks = ({k: chunks for k in result[0].dims},) * len(result) + elif isinstance(chunks, Sequence): + new_chunks = ({k: v for k, v in zip(result[0].dims, chunks)},) * len(result) elif isinstance(chunks, dict): - _chunks = chunks + new_chunks = (chunks,) * len(result) else: raise ValueError( - f"Chunks must be an instance or a dict, not {type(chunks)}" + f"Chunks must be an int, a Sequence, or a dict, not {type(chunks)}" + ) + + if chunk_mode == "minimum": + new_chunks = tuple( + ensure_minimum_chunks(r.data, normalize_chunks(r.data, chunks)) + for r in result ) - result = [r.chunk(_chunks) for r in result] + result = [r.chunk(ch) for r, ch in zip(result, new_chunks)] return result -def _ingest_array(array: Any, scales: Sequence[int]): +def normalize_array( + array: Any, scale_factors: Sequence[int], pad_mode: Union[str, None] +) -> DataArray: """ - Ingest an array in preparation for downscaling + Ingest an array in preparation for downscaling by converting to DataArray + and cropping / padding as needed. 
""" - if hasattr(array, "coords"): + if isinstance(array, DataArray): # if the input is a xarray.DataArray, assign a new variable to the DataArray and use the variable # `array` to refer to the data property of that array data = da.asarray(array.data) @@ -121,33 +177,17 @@ def _ingest_array(array: Any, scales: Sequence[int]): else: data = da.asarray(array) dims = tuple(f"dim_{d}" for d in range(data.ndim)) + offset = 0.0 coords = { - dim: DataArray(offset + np.arange(s, dtype="float"), dims=dim) - for dim, s, offset in zip(dims, array.shape, get_downsampled_offset(scales)) + dim: DataArray(offset + np.arange(shp, dtype="float"), dims=dim) + for dim, shp in zip(dims, array.shape) } name = None attrs = {} - result = DataArray(data=data, coords=coords, dims=dims, attrs=attrs, name=name) - return result - - -def even_padding(length: int, window: int) -> int: - """ - Compute how much to add to `length` such that the resulting value is evenly divisible by `window`. - - Parameters - ---------- - length : int - - window : int - - Returns - ------- - int - Value that, when added to `length`, results in a sum that is evenly divided by `window` - """ - return (window - (length % window)) % window + dataArray = DataArray(data=data, coords=coords, dims=dims, attrs=attrs, name=name) + reshaped = adjust_shape(dataArray, scale_factors=scale_factors, mode=pad_mode) + return reshaped def logn(x: float, n: float) -> float: @@ -169,14 +209,11 @@ def logn(x: float, n: float) -> float: return result -def prepad( - array: Any, - scale_factors: Sequence[int], - pad_mode: Optional[str] = "reflect", - rechunk: bool = True, -) -> da.array: +def adjust_shape( + array: DataArray, scale_factors: Sequence[int], mode: Union[str, None] +) -> DataArray: """ - Lazily pad an array such that its new dimensions are evenly divisible by some integer. + Pad or crop array such that its new dimensions are evenly divisible by a set of integers. 
Parameters ---------- @@ -188,134 +225,101 @@ def prepad( by the corresponding scale factor, and chunks that are smaller than or equal to the scale factor (if the array has chunks) - pad_mode : str - The edge mode used by the padding routine. This parameter will be passed to + mode : str + If set to "crop", then the input array will be cropped as needed. Otherwise, + this is the edge mode used by the padding routine. This parameter will be passed to `dask.array.pad` as the `mode` keyword. Returns ------- dask array """ - - if pad_mode == None: - # no op - return array - - pw = tuple( - (0, even_padding(ax, scale)) for ax, scale in zip(array.shape, scale_factors) - ) - - result = da.pad(array, pw, mode=pad_mode) - - # rechunk so that small extra chunks added by padding are fused into larger chunks, but only if we had to add chunks after padding - if rechunk and np.any(pw): - new_chunks = tuple( - np.multiply( - scale_factors, np.ceil(np.divide(result.chunksize, scale_factors)) - ).astype("int") - ) - result = result.rechunk(new_chunks) - - if hasattr(array, "coords"): - new_coords = {} - for p, k in zip(pw, array.coords): - old_coord = array.coords[k] - if np.diff(p) == 0: - new_coords[k] = old_coord - else: - extended_coords = interp1d( - np.arange(len(old_coord.values)), - old_coord.values, - fill_value="extrapolate", - )(np.arange(len(old_coord.values) + p[-1])).astype(old_coord.dtype) - new_coords[k] = DataArray( - extended_coords, dims=k, attrs=old_coord.attrs - ) - result = DataArray( - result, coords=new_coords, dims=array.dims, attrs=array.attrs - ) + result = array + misalignment = np.any(np.mod(array.shape, scale_factors)) + if misalignment and (mode is not None): + if mode == "crop": + new_shape = np.subtract(array.shape, np.mod(array.shape, scale_factors)) + result = array.isel({d: slice(s) for d, s in zip(array.dims, new_shape)}) + else: + new_shape = np.add( + array.shape, + np.subtract(scale_factors, np.mod(array.shape, scale_factors)), + ) + pw = { + 
dim: (0, int(new - old)) + for dim, new, old in zip(array.dims, new_shape, array.shape) + if old != new + } + result = array.pad(pad_width=pw, mode=mode) return result -def downscale( - array: Union[np.array, da.array, xarray.DataArray], - reduction: Callable, - scale_factors: Sequence[int], - pad_mode: Optional[str] = None, - **kwargs, -) -> DataArray: - """ - Downscale an array using windowed aggregation. This function is a light wrapper for `dask.array.coarsen`. - - Parameters - ---------- - array : numpy array, dask array, xarray DataArray - The array to be downscaled. - - reduction : callable - A function that aggregates chunks of data over windows. See the documentation of `dask.array.coarsen` for the expected - signature of this callable. - - scale_factors : iterable of ints - The desired downscaling factors, one for each axis. +def downscale_dask( + array: Any, + reduction: Callable[[NDArray[Any], Tuple[int, ...]], NDArray[Any]], + scale_factors: Union[int, Sequence[int], Dict[int, int]], + **kwargs: Any, +) -> Any: - trim_excess : bool, default=False - Whether the size of the input array should be increased or decreased such that - each scale factor tiles its respective array axis. Defaults to False, which will result in the input being padded. + if not np.all((np.array(array.shape) % np.array(scale_factors)) == 0): + raise ValueError( + f"Coarsening factors {scale_factors} do not align with array shape {array.shape}." + ) - **kwargs - extra kwargs passed to dask.array.coarsen + array = align_chunks(array, scale_factors) + name = "downscale-" + tokenize(reduction, array, scale_factors) + dsk = { + (name,) + key[1:]: (apply, reduction, [key, scale_factors], kwargs) + for key in flatten(array.__dask_keys__()) + } + chunks = tuple( + tuple(int(size // scale_factors[axis]) for size in sizes) + for axis, sizes in enumerate(array.chunks) + ) - Returns the downscaled version of the input as a dask array. 
- ------- - """ + meta = reduction( + np.empty(scale_factors, dtype=array.dtype), scale_factors, **kwargs + ) + graph = HighLevelGraph.from_collections(name, dsk, dependencies=[array]) + return Array(graph, name, chunks, meta=meta) - trim_excess = False - if pad_mode == None: - trim_excess = True - to_coarsen = prepad(da.asarray(array), scale_factors, pad_mode=pad_mode) +def downscale( + array: DataArray, + reduction: Callable[[NDArray[Any], Tuple[int, ...]], NDArray[Any]], + scale_factors: Sequence[int], + pad_mode: str, + **kwargs: Any, +) -> Any: - coarsened = coarsen( - reduction, - to_coarsen, - {d: s for d, s in enumerate(scale_factors)}, - trim_excess=trim_excess, - **kwargs, + to_downscale = normalize_array(array, scale_factors, pad_mode=pad_mode) + downscaled_data = downscale_dask( + to_downscale.data, reduction, scale_factors, **kwargs ) + downscaled_coords = downscale_coords(to_downscale, scale_factors) + return DataArray(downscaled_data, downscaled_coords, attrs=array.attrs) - if isinstance(array, xarray.DataArray): - base_coords = array.coords - new_coords = base_coords - if len(base_coords) > 0: - new_coords = tuple( - DataArray( - (offset * abs(base_coords[bc][1] - base_coords[bc][0])) - + (base_coords[bc][:s] * sc) - - base_coords[bc][0], - name=base_coords[bc].name, - attrs=base_coords[bc].attrs, - ) - for s, bc, offset, sc in zip( - coarsened.shape, - base_coords, - get_downsampled_offset(scale_factors), - scale_factors, - ) - ) - coarsened = DataArray( - coarsened, - dims=array.dims, - coords=new_coords, - attrs=array.attrs, - name=array.name, - ) - return coarsened +def downscale_coords( + array: DataArray, scale_factors: Sequence[int] +) -> Dict[Hashable, Any]: + """ + Take the windowed mean of each coordinate array. 
+ """ + new_coords = {} + for ( + coord_name, + coord, + ) in array.coords.items(): + coarsening_dims = { + d: scale_factors[idx] for idx, d in enumerate(array.dims) if d in coord.dims + } + new_coords[coord_name] = coord.coarsen(coarsening_dims).mean() + return new_coords def get_downscale_depth( - shape: Tuple[int, ...], scale_factors: Sequence[int], pad=False + shape: Tuple[int, ...], scale_factors: Sequence[int], pad: bool = False ) -> int: """ For an array and a sequence of scale factors, calculate the maximum possible number of downscaling operations. @@ -374,3 +378,71 @@ def slice_span(sl: slice) -> int: Measure the length of a slice """ return sl.stop - sl.start + + +def normalize_chunks( + array: xarray.DataArray, chunks: Union[int, Sequence[int], Dict[Hashable, int]] +) -> Tuple[int, ...]: + + if is_dict_like(chunks): + chunks = {array.get_axis_num(dim): chunk for dim, chunk in chunks.items()} + # normalize to explicit chunks, then take the first element from each + # collection of explicit chunks + chunks = tuple(c[0] for c in da.core.normalize_chunks(chunks, array.shape)) + cast(Tuple[int, ...], chunks) + return chunks + + +def ensure_minimum_chunks( + array: da.core.Array, chunks: Sequence[int] +) -> Tuple[int, ...]: + old_chunks = np.array(array.chunksize) + new_chunks = old_chunks.copy() + chunk_fitness = np.less(old_chunks, chunks) + if np.any(chunk_fitness): + new_chunks[chunk_fitness] = np.array(chunks)[chunk_fitness] + return tuple(new_chunks.tolist()) + else: + return tuple(array.chunks) + + +def broadcast_to_rank( + value: Union[int, Sequence[int], Dict[int, int]], rank: int +) -> Tuple[int, ...]: + result_dict = {} + if isinstance(value, int): + result_dict = {k: value for k in range(rank)} + elif isinstance(value, Sequence): + if not (len(value) == rank): + raise ValueError(f"Length of value {len(value)} must match rank: {rank}") + else: + result_dict = {k: v for k, v in enumerate(value)} + elif isinstance(value, dict): + for dim in 
range(rank): + result_dict[dim] = value.get(dim, 1) + else: + raise ValueError( + f"The first argument must be an int, a sequence of ints, or a dict of ints. Got {type(value)}" + ) + result = tuple(result_dict.values()) + typecheck = tuple(isinstance(val, int) for val in result) + if not all(typecheck): + bad_values = tuple(result[idx] for idx, val in enumerate(typecheck) if not val) + raise ValueError( + f"All elements of the first argument of this function must be ints. Non-integer values: {bad_values}" + ) + return result + + +def align_chunks(array: da.core.Array, scale_factors: Sequence[int]) -> da.core.Array: + """ + Ensure that all chunks are divisible by scale_factors + """ + new_chunks = {} + for idx, factor in enumerate(scale_factors): + aligned = aligned_coarsen_chunks(array.chunks[idx], factor) + if aligned != array.chunks[idx]: + new_chunks[idx] = aligned + if new_chunks: + array = array.rechunk(new_chunks) + return array diff --git a/src/xarray_multiscale/reducers.py b/src/xarray_multiscale/reducers.py index 59d6cfb..f79d381 100644 --- a/src/xarray_multiscale/reducers.py +++ b/src/xarray_multiscale/reducers.py @@ -1,19 +1,38 @@ -from typing import Any, Optional -from scipy.stats import mode as scipy_mode -import numpy as np +from typing import Any, Sequence, Tuple, cast, TypeVar, Dict +from scipy.stats import mode +from numpy.typing import NDArray -def mode(a: Any, axis: Optional[int] = None) -> Any: +def windowed_mean( + array: NDArray[Any], window_size: Tuple[int, ...], **kwargs: Dict[Any, Any] +) -> NDArray[Any]: """ - Coarsening by computing the n-dimensional mode, compatible with da.coarsen. If input is all 0s, the mode is not computed. + Compute the windowed mean of an array. 
""" - if axis is None: - return a - elif a.max() == 0: - return np.min(a, axis) - else: - transposed = a.transpose(*range(0, a.ndim, 2), *range(1, a.ndim, 2)) - reshaped = transposed.reshape(*transposed.shape[: a.ndim // 2], -1) - modes = scipy_mode(reshaped, axis=reshaped.ndim - 1).mode - result = modes.squeeze(axis=-1) - return result + reshaped = reshape_with_windows(array, window_size) + result = reshaped.mean(axis=tuple(range(1, reshaped.ndim, 2)), **kwargs) + cast(NDArray[Any], result) + return result + + +def windowed_mode(array: NDArray[Any], window_size: Tuple[int, ...]) -> NDArray[Any]: + """ + Coarsening by computing the n-dimensional mode. + """ + reshaped = reshape_with_windows(array, window_size) + transposed_shape = tuple(range(0, reshaped.ndim, 2)) + tuple( + range(1, reshaped.ndim, 2) + ) + transposed = reshaped.transpose(transposed_shape) + collapsed = transposed.reshape(tuple(reshaped.shape[slice(0, None, 2)]) + (-1,)) + result = mode(collapsed, axis=collapsed.ndim - 1).mode.squeeze(axis=-1) + return result + + +def reshape_with_windows( + array: NDArray[Any], window_size: Sequence[int] +) -> NDArray[Any]: + new_shape = () + for s, f in zip(array.shape, window_size): + new_shape += (s // f, f) + return array.reshape(new_shape) diff --git a/tests/test_multiscale.py b/tests/test_multiscale.py index 50f7f83..941107d 100644 --- a/tests/test_multiscale.py +++ b/tests/test_multiscale.py @@ -1,12 +1,17 @@ import pytest -from xarray.core import dataarray from xarray_multiscale.multiscale import ( + align_chunks, downscale, - prepad, + broadcast_to_rank, + adjust_shape, + downscale_coords, + downscale_dask, multiscale, - even_padding, get_downscale_depth, + normalize_chunks, + ensure_minimum_chunks ) +from xarray_multiscale.reducers import reshape_with_windows, windowed_mean, windowed_mode import dask.array as da import numpy as np from xarray import DataArray @@ -26,48 +31,82 @@ def test_downscale_depth(): assert get_downscale_depth((1500, 5495, 5200), 
(2, 2, 2)) == 10 -@pytest.mark.parametrize(("size", "scale"), ((10, 2), (11, 2), (12, 2), (13, 2))) -def test_even_padding(size: int, scale: int) -> None: - assert (size + even_padding(size, scale)) % scale == 0 - - -@pytest.mark.parametrize("dim", (1, 2, 3, 4)) -def test_prepad(dim: int) -> None: - size = (10,) * dim - chunks = (9,) * dim - scale = (2,) * dim - - arr = da.zeros(size, chunks=chunks) - arr2 = DataArray(arr) - - padded = prepad(arr, scale) - assert np.all(np.mod(padded.shape, scale) == 0) - - padded2 = prepad(arr2, scale) - assert np.all(np.mod(padded2.shape, scale) == 0) - +@pytest.mark.parametrize(("size", "scale"), ((10, 2), (11, 2), ((10,11), (2,3)))) +def test_adjust_shape(size, scale): + arr = DataArray(np.zeros(size)) + padded = adjust_shape(arr, scale, mode="constant") + scale_array = np.array(scale) + old_shape_array = np.array(arr.shape) + new_shape_array = np.array(padded.shape) + + if np.all((old_shape_array % scale_array) == 0): + assert np.array_equal(new_shape_array, old_shape_array) + else: + assert np.array_equal(new_shape_array, old_shape_array + ((scale_array - (old_shape_array % scale_array)))) + + cropped = adjust_shape(arr, scale, mode="crop") + new_shape_array = np.array(cropped.shape) + if np.all((old_shape_array % scale_array) == 0): + assert np.array_equal(new_shape_array, old_shape_array) + else: + assert np.array_equal(new_shape_array, old_shape_array - (old_shape_array % scale_array)) def test_downscale_2d(): chunks = (2, 2) scale = (2, 1) - arr_numpy = np.array( + data = DataArray(da.from_array(np.array( [[1, 0, 1, 0], [0, 1, 0, 1], [1, 0, 1, 0], [0, 1, 0, 1]], dtype="uint8" - ) - arr_dask = da.from_array(arr_numpy, chunks=chunks) - arr_xarray = DataArray(arr_dask) - - downscaled_numpy_float = downscale(arr_numpy, np.mean, scale).compute() - - downscaled_dask_float = downscale(arr_dask, np.mean, scale).compute() - - downscaled_xarray_float = downscale(arr_xarray, np.mean, scale).compute() - - answer_float = 
np.array([[0.5, 0.5, 0.5, 0.5], [0.5, 0.5, 0.5, 0.5]]) - - assert np.array_equal(downscaled_numpy_float, answer_float) - assert np.array_equal(downscaled_dask_float, answer_float) - assert np.array_equal(downscaled_xarray_float, answer_float) + ), chunks=chunks)) + answer = DataArray(np.array([[0.5, 0.5, 0.5, 0.5], [0.5, 0.5, 0.5, 0.5]])) + downscaled = downscale(data, windowed_mean, scale, pad_mode='crop').compute() + assert np.array_equal(downscaled, answer) + + +def test_downscale_coords(): + data = DataArray(np.zeros((10, 10)), dims=('x','y'), coords={'x': np.arange(10)}) + scale_factors = (2,1) + downscaled = downscale_coords(data, scale_factors) + answer = {'x': data['x'].coarsen({'x' : scale_factors[0]}).mean()} + + assert downscaled.keys() == answer.keys() + for k in downscaled: + assert_equal(answer[k], downscaled[k]) + + data = DataArray(np.zeros((10, 10)), + dims=('x','y'), + coords={'x': np.arange(10), + 'y': 5 + np.arange(10)}) + scale_factors = (2,1) + downscaled = downscale_coords(data, scale_factors) + answer = {'x': data['x'].coarsen({'x' : scale_factors[0]}).mean(), + 'y' : data['y'].coarsen({'y' : scale_factors[1]}).mean()} + + assert downscaled.keys() == answer.keys() + for k in downscaled: + assert_equal(answer[k], downscaled[k]) + + data = DataArray(np.zeros((10, 10)), + dims=('x','y'), + coords={'x': np.arange(10), + 'y': 5 + np.arange(10), + 'foo' : 5}) + scale_factors = (2,2) + downscaled = downscale_coords(data, scale_factors) + answer = {'x': data['x'].coarsen({'x' : scale_factors[0]}).mean(), + 'y' : data['y'].coarsen({'y' : scale_factors[1]}).mean(), + 'foo': data['foo']} + + assert downscaled.keys() == answer.keys() + for k in downscaled: + assert_equal(answer[k], downscaled[k]) + + +def test_invalid_multiscale(): + with pytest.raises(ValueError): + downscale_dask(np.arange(10), windowed_mean, (3,)) + with pytest.raises(ValueError): + downscale_dask(np.arange(16).reshape(4,4), windowed_mean, (3,3)) def test_multiscale(): @@ -81,10 
+120,10 @@ def test_multiscale(): base_array = np.tile(cell, np.ceil(np.divide(shape, chunks)).astype("int"))[ cropslice ] - pyr_trimmed = multiscale(base_array, np.mean, 2, pad_mode=None) - pyr_padded = multiscale(base_array, np.mean, 2, pad_mode="reflect") + pyr_trimmed = multiscale(base_array, windowed_mean, 2, pad_mode="crop") + pyr_padded = multiscale(base_array, windowed_mean, 2, pad_mode="constant") pyr_trimmed_unchained = multiscale( - base_array, np.mean, 2, pad_mode=None, chained=False + base_array, windowed_mean, 2, pad_mode="crop", chained=False ) assert [p.shape for p in pyr_padded] == [ shape, @@ -109,6 +148,52 @@ def test_multiscale(): assert np.allclose(pyr_padded[0].data.mean().compute(), 0.17146776406035666) +def test_chunking(): + ndim = 3 + shape = (9,) * ndim + base_array = da.zeros(shape, chunks=(1,) * ndim) + chunks = (1,) * ndim + reducer = windowed_mean + multi = multiscale(base_array, reducer, 2, chunks=chunks) + assert all([m.data.chunksize == chunks for m in multi]) + + chunks = (3,) * ndim + multi = multiscale(base_array, reducer, 2, chunks=chunks) + for m in multi: + assert m.data.chunksize == chunks or m.data.chunksize == m.data.shape + + chunks = (3,) * ndim + multi = multiscale(base_array, reducer, 2, chunks=chunks, chunk_mode='minimum') + for m in multi: + assert np.greater_equal(m.data.chunksize, chunks).all() or m.data.chunksize == m.data.shape + + chunks = 3 + multi = multiscale(base_array, reducer, 2, chunks=chunks, chunk_mode='minimum') + for m in multi: + assert np.greater_equal(m.data.chunksize, (chunks,) * ndim).all() or m.data.chunksize == m.data.shape + + +def test_depth(): + ndim = 3 + shape = (16,) * ndim + base_array = np.zeros(shape) + reducer = windowed_mean + full = multiscale(base_array, reducer, 2, depth=-1) + assert len(full) == 5 + + partial = multiscale(base_array, reducer, 2, depth=-2) + assert len(partial) == len(full) - 1 + [assert_equal(a,b) for a,b in zip(full, partial)] + + partial = 
multiscale(base_array, reducer, 2, depth=2) + assert len(partial) == 3 + [assert_equal(a,b) for a,b in zip(full, partial)] + + partial = multiscale(base_array, reducer, 2, depth=0) + assert len(partial) == 1 + [assert_equal(a,b) for a,b in zip(full, partial)] + + def test_coords(): dims = ("z", "y", "x") shape = (16,) * len(dims) @@ -123,7 +208,46 @@ def test_coords(): dataarray = DataArray(base_array, coords=coords) downscaled = dataarray.coarsen({"z": 2, "y": 2, "x": 2}).mean() - multi = multiscale(dataarray, np.mean, (2, 2, 2), preserve_dtype=False) + multi = multiscale(dataarray, windowed_mean, (2, 2, 2), preserve_dtype=False) assert_equal(multi[0], dataarray) assert_equal(multi[1], downscaled) + + +def test_normalize_chunks(): + data = DataArray(da.zeros((4,6), chunks=(1,1))) + assert normalize_chunks(data, {'dim_0' : 2, 'dim_1' : 1}) == (2,1) + + +def test_ensure_minimum_chunks(): + data = da.zeros((4,6), chunks=(1,1)) + assert ensure_minimum_chunks(data, (2,2)) == (2,2) + + data = da.zeros((4,6), chunks=(4,1)) + assert ensure_minimum_chunks(data, (2,2)) == (4,2) + + +def test_broadcast_to_rank(): + assert broadcast_to_rank(2, 1) == (2,) + assert broadcast_to_rank(2, 2) == (2,2) + assert broadcast_to_rank((2,3), 2) == (2,3) + assert broadcast_to_rank({0 : 2}, 3) == (2,1,1) + + +def test_align_chunks(): + data = da.arange(10, chunks=1) + rechunked = align_chunks(data, scale_factors=(2,)) + assert rechunked.chunks == ((2,) * 5,) + + data = da.arange(10, chunks=2) + rechunked = align_chunks(data, scale_factors=(2,)) + assert rechunked.chunks == ((2,) * 5,) + + data = da.arange(10, chunks=(1,1,3,5)) + rechunked = align_chunks(data, scale_factors=(2,)) + assert rechunked.chunks == ((2, 2, 2, 4,),) + + +def test_reshape_with_windows(): + data = np.arange(36).reshape(6,6) + assert reshape_with_windows(data, (2,2)).shape == (3,2,3,2) \ No newline at end of file diff --git a/tests/test_reducers.py b/tests/test_reducers.py index 3b16c43..3ca6332 100644 --- 
a/tests/test_reducers.py +++ b/tests/test_reducers.py @@ -1,41 +1,20 @@ -from typing import Callable -from xarray_multiscale.reducers import mode -import dask.array as da -from scipy.stats import mode as scipy_mode -from typing import Any +from xarray_multiscale.reducers import windowed_mean, windowed_mode import numpy as np -def modefunc(v): - return scipy_mode(v).mode +def test_windowed_mode(): + data = np.arange(16) % 3 + np.arange(16) % 2 + answer = np.array([2, 0, 1, 2]) + results = windowed_mode(data, (4,)) + assert np.array_equal(results, answer) + data = np.arange(16).reshape(4,4) % 3 + answer = np.array([[1,0],[0,2]]) + results = windowed_mode(data, (2,2)) + assert np.array_equal(results, answer) -def coarsened_comparator( - func: Callable, source_array: Any, coarsened_array: Any -) -> Any: - """ - Take a reducer function and two arrays; reduce the first array, - and check that the result is identical to the second array. - """ - result = np.array([True]).reshape((1,) * source_array.ndim) - if np.array_equal(func(source_array), coarsened_array): - result *= False - return result - - -def test_mode2(): - ndim = 2 - data_da = da.random.randint(0, 4, size=(2 ** 3,) * ndim, chunks=(2,) * ndim) - coarsened = da.coarsen(mode, data_da, {idx: 2 for idx in range(data_da.ndim)}) - results = da.map_blocks( - coarsened_comparator, modefunc, data_da, coarsened, dtype="bool" - ).compute() - assert np.all(results) - - ndim = 3 - data_da = da.random.randint(0, 4, size=(2 ** 3,) * ndim, chunks=(2,) * ndim) - coarsened = da.coarsen(mode, data_da, {idx: 2 for idx in range(data_da.ndim)}) - results = da.map_blocks( - coarsened_comparator, modefunc, data_da, coarsened, dtype="bool" - ).compute() - assert np.all(results) +def test_windowed_mean(): + data = np.arange(16).reshape(4,4) % 2 + answer = np.array([[0.5, 0.5],[0.5, 0.5]]) + results = windowed_mean(data, (2,2)) + assert np.array_equal(results, answer) \ No newline at end of file